Companion Lab Answers for "Big Data Technology: Principles and Applications" (3rd Edition, Lin Ziyu) - Lab 2: Getting Familiar with Common HDFS Operations

This post walks through performing common HDFS file-system operations with both Hadoop shell commands and the Java API: uploading and downloading files, inspecting file attributes, managing directories, printing file contents, appending to files, deleting, and moving. It also covers a custom `MyFSDataInputStream` class that reads an HDFS file line by line.


1. Implement each of the following functions in code, and accomplish the same task with Hadoop shell commands:

1.1 Upload any text file to HDFS; if the target file already exists in HDFS, let the user decide whether to append to the end of the existing file or overwrite it.

shell

# check whether the file exists
hdfs dfs -test -e /hdfstestfile.txt
# inspect the result: 0 means it exists, 1 means it does not
echo $?
# the file already exists: append to the end of it
hdfs dfs -appendToFile localtestfile.txt /hdfstestfile.txt
# the file already exists: overwrite it
hdfs dfs -copyFromLocal -f localtestfile.txt /hdfstestfile.txt

java

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import java.io.*;

public class AddFile_0101 {

	/**
	 * Check whether a path exists.
	 */
	public static boolean test(Configuration conf, String path) throws IOException {
		FileSystem fs = FileSystem.get(conf);
		return fs.exists(new Path(path));
	}

	/**
	 * Copy a local file to the given remote path.
	 * If the remote path already exists, overwrite it.
	 */
	public static void copyFromLocalFile(Configuration conf, String localFilePath, String remoteFilePath) throws IOException {
		FileSystem fs = FileSystem.get(conf);
		Path localPath = new Path(localFilePath);
		Path remotePath = new Path(remoteFilePath);
		// In fs.copyFromLocalFile, the first flag is "delete source", the second is "overwrite"
		fs.copyFromLocalFile(false, true, localPath, remotePath);
		fs.close();
	}

	/**
	 * Append the contents of a local file to a remote file.
	 */
	public static void appendToFile(Configuration conf, String localFilePath, String remoteFilePath) throws IOException {
		FileSystem fs = FileSystem.get(conf);
		Path remotePath = new Path(remoteFilePath);
		// Open an input stream over the local file
		FileInputStream in = new FileInputStream(localFilePath);
		// Open an output stream that appends to the end of the remote file
		FSDataOutputStream out = fs.append(remotePath);
		// Copy the bytes across
		byte[] data = new byte[1024];
		int read = -1;
		while ((read = in.read(data)) > 0) {
			out.write(data, 0, read);
		}
		out.close();
		in.close();
		fs.close();
	}

	/**
	 * Main.
	 */
	public static void main(String[] args) {
		Configuration conf = new Configuration();
		conf.set("fs.default.name", "hdfs://localhost:8020");
		// Local path
		String localFilePath = "/text.txt";
		// HDFS path
		String remoteFilePath = "/text.txt";
		// Append to the end if the file exists
		//String choice = "append";
		// Overwrite if the file exists
		String choice = "overwrite";

		try {
			// Check whether the file exists
			boolean fileExists = false;
			if (AddFile_0101.test(conf, remoteFilePath)) {
				fileExists = true;
				System.out.println(remoteFilePath + " already exists.");
			} else {
				System.out.println(remoteFilePath + " does not exist.");
			}
			// Handle the three cases
			if (!fileExists) {
				// The file does not exist: upload it
				AddFile_0101.copyFromLocalFile(conf, localFilePath, remoteFilePath);
				System.out.println(localFilePath + " uploaded to " + remoteFilePath);
			} else if (choice.equals("overwrite")) {
				// Overwrite
				AddFile_0101.copyFromLocalFile(conf, localFilePath, remoteFilePath);
				System.out.println(localFilePath + " overwrote " + remoteFilePath);
			} else if (choice.equals("append")) {
				// Append
				AddFile_0101.appendToFile(conf, localFilePath, remoteFilePath);
				System.out.println(localFilePath + " appended to " + remoteFilePath);
			}
		} catch (Exception e) {
			e.printStackTrace();
		}
	}
}
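
A note on `fs.append`: on a pseudo-distributed cluster with a single DataNode, the append call often fails because the write pipeline cannot find a replacement DataNode. The sketch below shows two client-side properties commonly suggested as a workaround; the single-node setup is an assumption here, not part of the original answer.

java

import org.apache.hadoop.conf.Configuration;

public class AppendConf {
	// Build a Configuration tuned for appends on a single-DataNode cluster (assumption).
	public static Configuration singleNodeAppendConf() {
		Configuration conf = new Configuration();
		conf.set("fs.default.name", "hdfs://localhost:8020");
		// With only one DataNode, the append pipeline must not try to replace a
		// "bad" datanode, since there is no replacement to pick.
		conf.set("dfs.client.block.write.replace-datanode-on-failure.policy", "NEVER");
		conf.set("dfs.client.block.write.replace-datanode-on-failure.enable", "true");
		return conf;
	}
}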

1.2 Download a specified file from HDFS; if a local file with the same name already exists, automatically rename the downloaded file.

shell

# if the local file already exists, download under a new name
if $(hdfs dfs -test -e file:///home/test.txt);
then $(hdfs dfs -copyToLocal test.txt /home/test2.txt);
else $(hdfs dfs -copyToLocal test.txt /home/test.txt);
fi

java

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import java.io.*;

public class HDFSApi_0102 {
	/**
	 * Download a file to the local filesystem.
	 * If the local path already exists, rename the download automatically.
	 */
	public static void copyToLocal(Configuration conf, String remoteFilePath, String localFilePath) throws IOException {
		FileSystem fs = FileSystem.get(conf);
		Path remotePath = new Path(remoteFilePath);
		File f = new File(localFilePath);
		// If the local file exists, pick a new name: path_0, path_1, ...
		if (f.exists()) {
			System.out.println(localFilePath + " already exists.");
			Integer i = 0;
			while (true) {
				f = new File(localFilePath + "_" + i.toString());
				if (!f.exists()) {
					localFilePath = localFilePath + "_" + i.toString();
					break;
				}
				i++;
			}
			System.out.println("The file will be renamed to: " + localFilePath);
		}

		// Download to the local path
		Path localPath = new Path(localFilePath);
		fs.copyToLocalFile(remotePath, localPath);
		fs.close();
	}

	/**
	 * Main.
	 */
	public static void main(String[] args) {
		Configuration conf = new Configuration();
		conf.set("fs.default.name", "hdfs://localhost:8020");
		String localFilePath = "/home/text.txt";
		String remoteFilePath = "/text.txt";

		try {
			HDFSApi_0102.copyToLocal(conf, remoteFilePath, localFilePath);
			System.out.println("Download complete");
		} catch (Exception e) {
			e.printStackTrace();
		}
	}
}

1.3 Print the contents of a specified HDFS file to the terminal.

shell

hdfs dfs -cat /test.txt

java

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import java.io.*;

public class HDFSApi_0103 {
	/**
	 * Read the file contents.
	 */
	public static void cat(Configuration conf, String remoteFilePath) throws IOException {
		FileSystem fs = FileSystem.get(conf);
		Path remotePath = new Path(remoteFilePath);
		FSDataInputStream in = fs.open(remotePath);
		BufferedReader d = new BufferedReader(new InputStreamReader(in));
		String line = null;
		while ((line = d.readLine()) != null) {
			System.out.println(line);
		}
		d.close();
		in.close();
		fs.close();
	}

	/**
	 * Main.
	 */
	public static void main(String[] args) {
		Configuration conf = new Configuration();
		conf.set("fs.default.name", "hdfs://localhost:8020");
		String remoteFilePath = "/user/hadoop/text.txt";
		try {
			System.out.println("Reading file: " + remoteFilePath);
			HDFSApi_0103.cat(conf, remoteFilePath);
			System.out.println("\nDone reading.");
		} catch (Exception e) {
			e.printStackTrace();
		}
	}
}
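
As an aside, the same output can be produced with Hadoop's `org.apache.hadoop.io.IOUtils` helper (the class section 3 below also uses), without any manual line handling. A minimal sketch; the class name `CatWithIOUtils` is made up here:

java

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import java.io.IOException;

public class CatWithIOUtils {
	// Stream the whole file to stdout; no line-by-line processing needed.
	public static void cat(Configuration conf, String remoteFilePath) throws IOException {
		FileSystem fs = FileSystem.get(conf);
		FSDataInputStream in = fs.open(new Path(remoteFilePath));
		try {
			// 4096-byte copy buffer; 'false' leaves System.out open afterwards
			IOUtils.copyBytes(in, System.out, 4096, false);
		} finally {
			IOUtils.closeStream(in);
			fs.close();
		}
	}
}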

1.4 Display the permissions, size, creation time, path, and other information of a specified HDFS file.

shell

hdfs dfs -ls -h /test.txt

java

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import java.io.*;
import java.text.SimpleDateFormat;

public class HDFSApi_0104 {
	/**
	 * Display information about the specified file.
	 */
	public static void ls(Configuration conf, String remoteFilePath) throws IOException {
		FileSystem fs = FileSystem.get(conf);
		Path remotePath = new Path(remoteFilePath);
		FileStatus[] fileStatuses = fs.listStatus(remotePath);
		for (FileStatus s : fileStatuses) {
			System.out.println("Path: " + s.getPath().toString());
			System.out.println("Permission: " + s.getPermission().toString());
			System.out.println("Size: " + s.getLen());
			// getModificationTime returns a timestamp; convert it to a date string
			Long timeStamp = s.getModificationTime();
			SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
			String date = format.format(timeStamp);
			System.out.println("Time: " + date);
		}
		fs.close();
	}

	/**
	 * Main.
	 */
	public static void main(String[] args) {
		Configuration conf = new Configuration();
		conf.set("fs.default.name", "hdfs://localhost:8020");
		String remoteFilePath = "/text.txt";

		try {
			System.out.println("Reading file info: " + remoteFilePath);
			HDFSApi_0104.ls(conf, remoteFilePath);
			System.out.println("\nDone.");
		} catch (Exception e) {
			e.printStackTrace();
		}
	}
}
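
Since this task targets a single file, `fs.getFileStatus` is a slightly more direct fit than `listStatus` (which is really meant for directories). A minimal sketch; the class name `StatOneFile` is made up here:

java

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import java.io.IOException;
import java.text.SimpleDateFormat;

public class StatOneFile {
	// Print permission, size, modification time, and path of one file.
	public static void stat(Configuration conf, String remoteFilePath) throws IOException {
		FileSystem fs = FileSystem.get(conf);
		FileStatus s = fs.getFileStatus(new Path(remoteFilePath));
		System.out.println("Path: " + s.getPath());
		System.out.println("Permission: " + s.getPermission());
		System.out.println("Size: " + s.getLen());
		// getModificationTime returns a millisecond timestamp
		SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
		System.out.println("Modified: " + format.format(s.getModificationTime()));
		fs.close();
	}
}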

1.5 Given a directory in HDFS, print the permissions, size, creation time, path, and other information of all files under it; if an entry is itself a directory, recursively print the information for all files under that directory.

shell

hdfs dfs -ls -R -h /hadoop

java

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import java.io.*;
import java.text.SimpleDateFormat;

public class HDFSApi_0105 {

	/**
	 * Display information about every file under the given directory.
	 */
	public static void lsDir(Configuration conf, String remoteDir) throws IOException {
		FileSystem fs = FileSystem.get(conf);
		Path dirPath = new Path(remoteDir);
		// Recursively fetch all files under the directory
		RemoteIterator<LocatedFileStatus> remoteIterator = fs.listFiles(dirPath, true);

		// Print the information for each file
		while (remoteIterator.hasNext()) {
			FileStatus s = remoteIterator.next();
			System.out.println("Path: " + s.getPath().toString());
			System.out.println("Permission: " + s.getPermission().toString());
			System.out.println("Size: " + s.getLen());
			// getModificationTime returns a timestamp; convert it to a date string
			Long timeStamp = s.getModificationTime();
			SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
			String date = format.format(timeStamp);
			System.out.println("Time: " + date);
			System.out.println();
		}
		fs.close();
	}

	/**
	 * Main.
	 */
	public static void main(String[] args) {
		Configuration conf = new Configuration();
		conf.set("fs.default.name", "hdfs://localhost:8020");
		String remoteDir = "/user/hadoop";

		try {
			System.out.println("(Recursively) reading info for all files under: " + remoteDir);
			HDFSApi_0105.lsDir(conf, remoteDir);
			System.out.println("Done.");
		} catch (Exception e) {
			e.printStackTrace();
		}
	}
}
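
One caveat: `listFiles(dirPath, true)` only returns files, so subdirectories themselves never get a line of output. If the directory entries should be printed too, a manual recursion over `listStatus` does the job. A minimal sketch; the class name `LsDirRecursive` is made up here:

java

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import java.io.IOException;

public class LsDirRecursive {
	// Print every entry (files and directories), recursing into directories.
	public static void lsDir(FileSystem fs, Path dirPath) throws IOException {
		for (FileStatus s : fs.listStatus(dirPath)) {
			System.out.println((s.isDirectory() ? "[dir]  " : "[file] ")
					+ s.getPath() + "  " + s.getPermission() + "  " + s.getLen());
			if (s.isDirectory()) {
				lsDir(fs, s.getPath()); // recurse into the subdirectory
			}
		}
	}

	public static void main(String[] args) throws IOException {
		Configuration conf = new Configuration();
		conf.set("fs.default.name", "hdfs://localhost:8020");
		FileSystem fs = FileSystem.get(conf);
		lsDir(fs, new Path("/user/hadoop"));
		fs.close();
	}
}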

1.6 Given the path of a file in HDFS, create and delete that file. If the directory containing the file does not exist, create the directory automatically.

shell

if $(hdfs dfs -test -d dir1/dir2);
then $(hdfs dfs -touchz dir1/dir2/filename);
else $(hdfs dfs -mkdir -p dir1/dir2 && hdfs dfs -touchz dir1/dir2/filename);
fi
# delete the file
hdfs dfs -rm dir1/dir2/filename

java

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import java.io.*;

public class HDFSApi_0106 {
	/**
	 * Check whether a path exists.
	 */
	public static boolean test(Configuration conf, String path) throws IOException {
		FileSystem fs = FileSystem.get(conf);
		return fs.exists(new Path(path));
	}

	/**
	 * Create a directory.
	 */
	public static boolean mkdir(Configuration conf, String remoteDir) throws IOException {
		FileSystem fs = FileSystem.get(conf);
		Path dirPath = new Path(remoteDir);
		boolean result = fs.mkdirs(dirPath);
		fs.close();
		return result;
	}

	/**
	 * Create a file.
	 */
	public static void touchz(Configuration conf, String remoteFilePath) throws IOException {
		FileSystem fs = FileSystem.get(conf);
		Path remotePath = new Path(remoteFilePath);
		FSDataOutputStream outputStream = fs.create(remotePath);
		outputStream.close();
		fs.close();
	}

	/**
	 * Delete a file.
	 */
	public static boolean rm(Configuration conf, String remoteFilePath) throws IOException {
		FileSystem fs = FileSystem.get(conf);
		Path remotePath = new Path(remoteFilePath);
		boolean result = fs.delete(remotePath, false);
		fs.close();
		return result;
	}

	/**
	 * Main.
	 */
	public static void main(String[] args) {
		Configuration conf = new Configuration();
		conf.set("fs.default.name", "hdfs://localhost:8020");
		String remoteFilePath = "/user/hadoop/input/text.txt";
		String remoteDir = "/user/hadoop/input";

		try {
			// If the path exists, delete it; otherwise create it
			if (HDFSApi_0106.test(conf, remoteFilePath)) {
				HDFSApi_0106.rm(conf, remoteFilePath); // delete the file
				System.out.println("Deleted path: " + remoteFilePath);
			} else {
				if (!HDFSApi_0106.test(conf, remoteDir)) {
					// The directory does not exist: create it
					HDFSApi_0106.mkdir(conf, remoteDir);
					System.out.println("Created directory: " + remoteDir);
				}
				HDFSApi_0106.touchz(conf, remoteFilePath);
				System.out.println("Created path: " + remoteFilePath);
			}
		} catch (Exception e) {
			e.printStackTrace();
		}
	}
}
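
Worth knowing: HDFS's `FileSystem.create()` creates missing parent directories on its own, so the explicit `mkdir` branch above mainly exists to print a message. A minimal sketch that derives the parent from the file path instead of hard-coding a second string (same hypothetical paths as above):

java

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import java.io.IOException;

public class TouchWithParent {
	public static void main(String[] args) throws IOException {
		Configuration conf = new Configuration();
		conf.set("fs.default.name", "hdfs://localhost:8020");
		FileSystem fs = FileSystem.get(conf);
		Path remotePath = new Path("/user/hadoop/input/text.txt");
		Path parent = remotePath.getParent(); // no separate remoteDir string needed
		if (!fs.exists(parent)) {
			fs.mkdirs(parent); // explicit, though create() below would also do it
			System.out.println("Created directory: " + parent);
		}
		FSDataOutputStream out = fs.create(remotePath);
		out.close();
		fs.close();
	}
}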

1.7 Given the path of a directory in HDFS, create and delete that directory. When creating, automatically create any missing parent directories; when deleting, let the user decide whether the directory should still be deleted if it is not empty.

shell

# create the directory (with any missing parents)
hdfs dfs -mkdir -p dir
# delete the directory (only succeeds if it is empty)
hdfs dfs -rmdir dir
# force-delete the directory and its contents
hdfs dfs -rm -R dir

java

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import java.io.*;

public class HDFSApi_0107 {

	/**
	 * Check whether a path exists.
	 */
	public static boolean test(Configuration conf, String path) throws IOException {
		FileSystem fs = FileSystem.get(conf);
		return fs.exists(new Path(path));
	}

	/**
	 * Check whether a directory is empty.
	 * true: empty, false: non-empty
	 */
	public static boolean isDirEmpty(Configuration conf, String remoteDir) throws IOException {
		FileSystem fs = FileSystem.get(conf);
		Path dirPath = new Path(remoteDir);
		RemoteIterator<LocatedFileStatus> remoteIterator = fs.listFiles(dirPath, true);
		return !remoteIterator.hasNext();
	}

	/**
	 * Create a directory.
	 */
	public static boolean mkdir(Configuration conf, String remoteDir) throws IOException {
		FileSystem fs = FileSystem.get(conf);
		Path dirPath = new Path(remoteDir);
		boolean result = fs.mkdirs(dirPath);
		fs.close();
		return result;
	}

	/**
	 * Delete a directory.
	 */
	public static boolean rmDir(Configuration conf, String remoteDir) throws IOException {
		FileSystem fs = FileSystem.get(conf);
		Path dirPath = new Path(remoteDir);
		// The second argument controls recursive deletion of all contents
		boolean result = fs.delete(dirPath, true);
		fs.close();
		return result;
	}

	/**
	 * Main.
	 */
	public static void main(String[] args) {
		Configuration conf = new Configuration();
		conf.set("fs.default.name", "hdfs://localhost:8020");
		String remoteDir = "/user/hadoop/input";
		// Whether to force deletion of a non-empty directory
		Boolean forceDelete = false;

		try {
			// Create the directory if it does not exist; otherwise delete it
			if (!HDFSApi_0107.test(conf, remoteDir)) {
				HDFSApi_0107.mkdir(conf, remoteDir);
				System.out.println("Created directory: " + remoteDir);
			} else {
				if (HDFSApi_0107.isDirEmpty(conf, remoteDir) || forceDelete) {
					HDFSApi_0107.rmDir(conf, remoteDir);
					System.out.println("Deleted directory: " + remoteDir);
				} else {
					// The directory is not empty
					System.out.println("Directory not empty, not deleting: " + remoteDir);
				}
			}
		} catch (Exception e) {
			e.printStackTrace();
		}
	}
}

1.8 Append content to a specified HDFS file, with the user choosing whether the content goes at the beginning or the end of the original file.

shell

# append to the end of the HDFS file
hdfs dfs -appendToFile local.txt test.txt
# prepend: download the file, concatenate it after the new local content, re-upload
hdfs dfs -get test.txt
cat test.txt >> local.txt
hdfs dfs -copyFromLocal -f local.txt test.txt

java

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import java.io.*;

public class HDFSApi_0108 {

	/**
	 * Check whether a path exists.
	 */
	public static boolean test(Configuration conf, String path) throws IOException {
		FileSystem fs = FileSystem.get(conf);
		return fs.exists(new Path(path));
	}

	/**
	 * Append a text string to a file.
	 */
	public static void appendContentToFile(Configuration conf, String content, String remoteFilePath) throws IOException {
		FileSystem fs = FileSystem.get(conf);
		Path remotePath = new Path(remoteFilePath);
		/* Open an output stream that appends to the end of the file */
		FSDataOutputStream out = fs.append(remotePath);
		out.write(content.getBytes());
		out.close();
		fs.close();
	}

	/**
	 * Append the contents of a local file.
	 */
	public static void appendToFile(Configuration conf, String localFilePath, String remoteFilePath) throws IOException {
		FileSystem fs = FileSystem.get(conf);
		Path remotePath = new Path(remoteFilePath);
		/* Open an input stream over the local file */
		FileInputStream in = new FileInputStream(localFilePath);
		/* Open an output stream that appends to the end of the remote file */
		FSDataOutputStream out = fs.append(remotePath);
		/* Copy the bytes across */
		byte[] data = new byte[1024];
		int read = -1;
		while ((read = in.read(data)) > 0) {
			out.write(data, 0, read);
		}
		out.close();
		in.close();
		fs.close();
	}

	/**
	 * Move a file to the local filesystem.
	 * The source file is deleted after the move.
	 */
	public static void moveToLocalFile(Configuration conf, String remoteFilePath, String localFilePath) throws IOException {
		FileSystem fs = FileSystem.get(conf);
		Path remotePath = new Path(remoteFilePath);
		Path localPath = new Path(localFilePath);
		fs.moveToLocalFile(remotePath, localPath);
	}

	/**
	 * Create a file.
	 */
	public static void touchz(Configuration conf, String remoteFilePath) throws IOException {
		FileSystem fs = FileSystem.get(conf);
		Path remotePath = new Path(remoteFilePath);
		FSDataOutputStream outputStream = fs.create(remotePath);
		outputStream.close();
		fs.close();
	}

	/**
	 * Main.
	 */
	public static void main(String[] args) {
		Configuration conf = new Configuration();
		conf.set("fs.default.name", "hdfs://localhost:8020");
		String remoteFilePath = "/user/hadoop/text.txt";
		String content = "Newly appended content\n";
		// Append to the end of the file
		String choice = "after";
		// Append to the beginning of the file
		//String choice = "before";

		try {
			/* Check whether the file exists */
			if (!HDFSApi_0108.test(conf, remoteFilePath)) {
				System.out.println("File does not exist: " + remoteFilePath);
			} else {
				if (choice.equals("after")) {
					HDFSApi_0108.appendContentToFile(conf, content, remoteFilePath);
					System.out.println("Appended content to the end of " + remoteFilePath);
				} else if (choice.equals("before")) {
					/* There is no API for prepending directly, so move the file
					   to the local filesystem, create a new HDFS file, and
					   append the pieces in order. */
					String localTmpPath = "/user/hadoop/tmp.txt";
					HDFSApi_0108.moveToLocalFile(conf, remoteFilePath, localTmpPath);
					// Create a new, empty file
					HDFSApi_0108.touchz(conf, remoteFilePath);
					// Write the new content first
					HDFSApi_0108.appendContentToFile(conf, content, remoteFilePath);
					// Then write the original content back
					HDFSApi_0108.appendToFile(conf, localTmpPath, remoteFilePath);
					System.out.println("Prepended content to " + remoteFilePath);
				}
			}
		} catch (Exception e) {
			e.printStackTrace();
		}
	}
}
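
The "before" branch above round-trips through a local temporary file. For files small enough to fit in memory, a sketch that buffers the original bytes and rewrites the file avoids the local copy entirely (the class name `PrependInMemory` and the fits-in-memory assumption are mine):

java

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import java.io.ByteArrayOutputStream;
import java.io.IOException;

public class PrependInMemory {
	// Prepend 'content' by buffering the old bytes in memory, then rewriting.
	public static void prepend(Configuration conf, String content, String remoteFilePath)
			throws IOException {
		FileSystem fs = FileSystem.get(conf);
		Path remotePath = new Path(remoteFilePath);
		// Read the existing file fully into memory (assumes it fits).
		ByteArrayOutputStream old = new ByteArrayOutputStream();
		FSDataInputStream in = fs.open(remotePath);
		byte[] buf = new byte[4096];
		int n;
		while ((n = in.read(buf)) > 0) {
			old.write(buf, 0, n);
		}
		in.close();
		// Recreate the file: new content first, old bytes after.
		FSDataOutputStream out = fs.create(remotePath, true); // true = overwrite
		out.write(content.getBytes());
		out.write(old.toByteArray());
		out.close();
		fs.close();
	}
}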

1.9 Delete a specified file in HDFS.

shell

hdfs dfs -rm test.txt

java

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import java.io.*;

public class HDFSApi {
	/**
	 * Delete a file.
	 */
	public static boolean rm(Configuration conf, String remoteFilePath) throws IOException {
		FileSystem fs = FileSystem.get(conf);
		Path remotePath = new Path(remoteFilePath);
		boolean result = fs.delete(remotePath, false);
		fs.close();
		return result;
	}

	/**
	 * Main.
	 */
	public static void main(String[] args) {
		Configuration conf = new Configuration();
		conf.set("fs.default.name", "hdfs://localhost:8020");
		String remoteFilePath = "/user/hadoop/text.txt";
		try {
			if (HDFSApi.rm(conf, remoteFilePath)) {
				System.out.println("Deleted file: " + remoteFilePath);
			} else {
				System.out.println("Operation failed (file missing or deletion failed)");
			}
		} catch (Exception e) {
			e.printStackTrace();
		}
	}
}
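
Note that `fs.delete` removes the file immediately, bypassing the HDFS trash, whereas the shell's `-rm` moves files to `.Trash` when `fs.trash.interval` is enabled. If trash-aware deletion is wanted from Java, a sketch (assuming trash is enabled on the cluster):

java

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.Trash;
import java.io.IOException;

public class RmViaTrash {
	// Move the file to the current user's .Trash instead of deleting outright.
	public static boolean rm(Configuration conf, String remoteFilePath) throws IOException {
		FileSystem fs = FileSystem.get(conf);
		boolean moved = Trash.moveToAppropriateTrash(fs, new Path(remoteFilePath), conf);
		fs.close();
		return moved;
	}
}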

1.10 Move an HDFS file to a specified path.

shell

hdfs dfs -mv test.txt dir/test.txt

java

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import java.io.*;

public class HDFSApi {
	/**
	 * Move a file.
	 */
	public static boolean mv(Configuration conf, String remoteFilePath, String remoteToFilePath) throws IOException {
		FileSystem fs = FileSystem.get(conf);
		Path srcPath = new Path(remoteFilePath);
		Path dstPath = new Path(remoteToFilePath);
		boolean result = fs.rename(srcPath, dstPath);
		fs.close();
		return result;
	}

	/**
	 * Main.
	 */
	public static void main(String[] args) {
		Configuration conf = new Configuration();
		conf.set("fs.default.name", "hdfs://localhost:8020");
		// Source HDFS path
		String remoteFilePath = "hdfs:///user/hadoop/text.txt";
		// Destination HDFS path
		String remoteToFilePath = "hdfs:///user/hadoop/new.txt";

		try {
			if (HDFSApi.mv(conf, remoteFilePath, remoteToFilePath)) {
				System.out.println("Moved " + remoteFilePath + " to " + remoteToFilePath);
			} else {
				System.out.println("Operation failed (source missing or move failed)");
			}
		} catch (Exception e) {
			e.printStackTrace();
		}
	}
}
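
Since `fs.rename` signals failure only by returning false, it can help to check the usual causes up front, such as a missing source or a missing destination directory. A minimal sketch; the class name `MvChecked` is made up here:

java

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import java.io.IOException;

public class MvChecked {
	// rename() reports failure by returning false, so diagnose common causes first.
	public static boolean mv(Configuration conf, String src, String dst) throws IOException {
		FileSystem fs = FileSystem.get(conf);
		Path srcPath = new Path(src);
		Path dstPath = new Path(dst);
		if (!fs.exists(srcPath)) {
			System.out.println("Source does not exist: " + src);
			fs.close();
			return false;
		}
		if (dstPath.getParent() != null && !fs.exists(dstPath.getParent())) {
			System.out.println("Destination directory does not exist: " + dstPath.getParent());
			fs.close();
			return false;
		}
		boolean result = fs.rename(srcPath, dstPath);
		fs.close();
		return result;
	}
}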

2. Implement a class "MyFSDataInputStream" that extends "org.apache.hadoop.fs.FSDataInputStream" with the following requirement: provide a method "readLine()" that reads a specified HDFS file line by line, returning null at end of file and otherwise returning one line of text.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import java.io.*;

public class MyFSDataInputStream extends FSDataInputStream {
	public MyFSDataInputStream(InputStream in) {
		super(in);
	}

	/**
	 * Read one line.
	 * Reads one character at a time; stops at "\n" and returns the line.
	 */
	public static String readline(BufferedReader br) throws IOException {
		char[] data = new char[1024];
		int read = -1;
		int off = 0;
		// Across calls, br continues from where the previous read ended,
		// so off restarts at 0 on every call to this method.
		while ((read = br.read(data, off, 1)) != -1) {
			if (String.valueOf(data[off]).equals("\n")) {
				off += 1;
				break;
			}
			off += 1;
		}
		if (off > 0) {
			return String.valueOf(data, 0, off);
		} else {
			return null;
		}
	}

	/**
	 * Read the file contents.
	 */
	public static void cat(Configuration conf, String remoteFilePath) throws IOException {
		FileSystem fs = FileSystem.get(conf);
		Path remotePath = new Path(remoteFilePath);
		FSDataInputStream in = fs.open(remotePath);
		BufferedReader br = new BufferedReader(new InputStreamReader(in));
		String line = null;
		while ((line = MyFSDataInputStream.readline(br)) != null) {
			System.out.println(line);
		}
		br.close();
		in.close();
		fs.close();
	}

	/**
	 * Main.
	 */
	public static void main(String[] args) {
		Configuration conf = new Configuration();
		conf.set("fs.default.name", "hdfs://localhost:9000");
		String remoteFilePath = "/user/hadoop/text.txt";
		try {
			MyFSDataInputStream.cat(conf, remoteFilePath);
		} catch (Exception e) {
			e.printStackTrace();
		}
	}
}
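
For comparison, Hadoop ships its own line reader, `org.apache.hadoop.util.LineReader`, which handles the buffering that the hand-rolled `readline` above does one character at a time. A minimal sketch of the same cat logic built on it:

java

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.util.LineReader;
import java.io.IOException;

public class CatWithLineReader {
	public static void cat(Configuration conf, String remoteFilePath) throws IOException {
		FileSystem fs = FileSystem.get(conf);
		FSDataInputStream in = fs.open(new Path(remoteFilePath));
		LineReader reader = new LineReader(in);
		Text line = new Text();
		// readLine returns the number of bytes consumed; 0 means end of stream.
		while (reader.readLine(line) > 0) {
			System.out.println(line.toString());
		}
		reader.close();
		fs.close();
	}
}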

3. Use java.net.URL together with org.apache.hadoop.fs.FsUrlStreamHandlerFactory to print a specified HDFS file to the terminal.

import org.apache.hadoop.fs.FsUrlStreamHandlerFactory;
import org.apache.hadoop.io.IOUtils;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;

public class FsUrl {
	static {
		// Register the hdfs:// URL scheme handler (allowed only once per JVM)
		URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
	}

	public static void cat(String remoteFilePath) {
		// try-with-resources closes the stream automatically
		try (InputStream in = new URL("hdfs", "localhost", 9000, remoteFilePath).openStream()) {
			IOUtils.copyBytes(in, System.out, 4096, false);
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

	public static void main(String[] args) {
		cat("/user/hadoop/text.txt");
	}
}