Hadoop distributed cluster + Eclipse: Exception in thread "main" org.apache.hadoop.mapreduce.lib.input.InvalidInputException

This post walks through configuring Eclipse with Hadoop on Ubuntu and fixing an "input path does not exist" error that came up when running a deduplication example. Once the input and output paths were written as fully-qualified HDFS URIs, the job ran successfully.

I have been working with a Hadoop distributed cluster recently. Today I finished configuring Eclipse with Hadoop on Ubuntu and ran a deduplication example. I had already uploaded the input file to HDFS, but the run failed with Exception in thread "main" org.apache.hadoop.mapreduce.lib.input.InvalidInputException: Input path does not exist. The job was resolving the input path against the local project directory instead of HDFS. The fix is simply to write the input and output locations properly, as full HDFS URIs:

// Point the job at explicit, fully-qualified HDFS input and output paths
FileInputFormat.addInputPath(job, new Path("hdfs://Master:9000/user/ycl1/input/inputfile1"));
FileOutputFormat.setOutputPath(job, new Path("hdfs://Master:9000/user/ycl1/output"));
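
Hardcoding the URIs works, but it is brittle. A more flexible sketch (my own variant, not part of the original example) reads the paths from the program arguments, so the same jar can be pointed at any input/output pair:

// Sketch: take the paths from args instead of hardcoding them.
// Example invocation (jar and class names here are hypothetical):
//   hadoop jar dedup.jar org.example.Dedup hdfs://Master:9000/user/ycl1/input/inputfile1 hdfs://Master:9000/user/ycl1/output
if (args.length < 2) {
    System.err.println("Usage: Dedup <input uri> <output uri>");
    System.exit(2);
}
FileInputFormat.addInputPath(job, new Path(args[0]));
FileOutputFormat.setOutputPath(job, new Path(args[1]));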

Re-run the job, and it completes without the error.

The hdfs://Master:9000 prefix has to match fs.default.name (called fs.defaultFS in newer Hadoop versions) in core-site.xml; that property defines the default filesystem that paths are resolved against.
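
It can save a wasted run to verify, before submitting, that the client actually sees the default filesystem you expect and that the input really exists. A minimal standalone check, assuming core-site.xml (with fs.defaultFS pointing at hdfs://Master:9000) is on the client's classpath:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class CheckInput {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Should print hdfs://Master:9000 if core-site.xml is being picked up
        System.out.println("fs.defaultFS = " + conf.get("fs.defaultFS"));
        FileSystem fs = FileSystem.get(conf);
        // A path without a scheme is resolved against the default filesystem
        Path input = new Path("/user/ycl1/input/inputfile1");
        System.out.println(input + " exists: " + fs.exists(input));
    }
}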

A related case: the same kind of path problem shows up when running a WordCount job from IDEA on Windows with local paths. The driver, reducer, and mapper:

package org.qst;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class WordCountRunner {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf);
        // 2. Set the jar by the driver class so the framework can locate it
        job.setJarByClass(WordCountRunner.class);
        // 3. Set the mapper and reducer classes
        job.setMapperClass(WordCountMapper.class);
        job.setReducerClass(WordCountReducer.class);
        // 4. Set the map output key/value types
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);
        // 5. Set the final output key/value types
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        // 6. Set input and output paths (local Windows paths here)
        FileInputFormat.setInputPaths(job, new Path("D:\\javatools\\work_count\\wc.txt"));
        FileOutputFormat.setOutputPath(job, new Path("D:\\javatools\\work_count\\output"));
        // 7. Submit the job and wait for completion
        boolean result = job.waitForCompletion(true);
        System.exit(result ? 0 : 1);
    }
}

package org.qst;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;
import java.util.*;

public class WordCountReducer extends Reducer<Text, IntWritable, Text, IntWritable> {
    private List<String> results = new ArrayList<>();
    private int sum;

    @Override
    protected void reduce(Text key, Iterable<IntWritable> values, Context context)
            throws IOException, InterruptedException {
        sum = 0;
        for (IntWritable value : values) {
            sum += value.get();
        }
        // Zero-pad the count so a plain string sort orders by count
        // (works as long as counts stay below 10000)
        results.add(String.format("%04d-%s", sum, key.toString()));
    }

    @Override
    protected void cleanup(Context context) throws IOException, InterruptedException {
        // Emit words in descending order of count
        Collections.sort(results, Collections.reverseOrder());
        for (String result : results) {
            String[] parts = result.split("-", 2);
            String word = parts[1];
            int count = Integer.parseInt(parts[0]);
            context.write(new Text(word), new IntWritable(count));
        }
    }
}

package org.qst;

/* The mapper class: the map logic that can run in parallel */

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import java.io.IOException;

public class WordCountMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
    Text k = new Text();
    IntWritable v = new IntWritable(1);

    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        // Input record, e.g. (17, "long long time age")
        // 1. Convert the Text value to a String: "long long time age"
        String line = value.toString();
        // 2. Split on spaces: [long, long, time, age]
        String[] words = line.split(" ");
        for (String word : words) {
            k.set(word);
            // Emits (long,1), (long,1), (time,1), (age,1)
            context.write(k, v);
        }
    }
}

Running this fails with:

Exception in thread "main" 0: No such file or directory
    at org.apache.hadoop.io.nativeio.NativeIO$POSIX.chmod(NativeIO.java:388)
    at org.apache.hadoop.fs.RawLocalFileSystem.setPermission(RawLocalFileSystem.java:974)
    at org.apache.hadoop.fs.ChecksumFileSystem$1.apply(ChecksumFileSystem.java:591)
    at org.apache.hadoop.fs.ChecksumFileSystem$FsOperation.run(ChecksumFileSystem.java:572)
    at org.apache.hadoop.fs.ChecksumFileSystem.setPermission(ChecksumFileSystem.java:594)
    at org.apache.hadoop.fs.FileSystem.mkdirs(FileSystem.java:752)
    at org.apache.hadoop.mapreduce.JobResourceUploader.mkdirs(JobResourceUploader.java:660)
    at org.apache.hadoop.mapreduce.JobResourceUploader.uploadResourcesInternal(JobResourceUploader.java:174)
    at org.apache.hadoop.mapreduce.JobResourceUploader.uploadResources(JobResourceUploader.java:135)
    at org.apache.hadoop.mapreduce.JobSubmitter.copyAndConfigureFiles(JobSubmitter.java:99)
    at org.apache.hadoop.mapreduce.JobSubmitter.submitJobInternal(JobSubmitter.java:194)
    at org.apache.hadoop.mapreduce.Job$11.run(Job.java:1571)
    at org.apache.hadoop.mapreduce.Job$11.run(Job.java:1568)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1878)
    at org.apache.hadoop.mapreduce.Job.submit(Job.java:1568)
    at org.apache.hadoop.mapreduce.Job.waitForCompletion(Job.java:1589)
    at org.qst.WordCountRunner.main(WordCountRunner.java:31)
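
This NativeIO chmod failure is a Windows environment problem rather than a bug in the job: on Windows, Hadoop's local filesystem operations need winutils.exe and hadoop.dll, and the log further down confirms they were never found ("Did not find winutils.exe", "HADOOP_HOME and hadoop.home.dir are unset"). A common workaround, sketched here with a hypothetical install path, is to unpack a winutils build matching your Hadoop version and point hadoop.home.dir at it before any Hadoop class loads:

public class WindowsHadoopSetup {
    public static void main(String[] args) throws Exception {
        // Must run before the first org.apache.hadoop class initializes:
        // Hadoop's Shell class reads hadoop.home.dir (or the HADOOP_HOME
        // environment variable) in a static block and expects bin\winutils.exe
        // beneath it. D:\hadoop-3.3.4 is a hypothetical install path.
        System.setProperty("hadoop.home.dir", "D:\\hadoop-3.3.4");
        // ... then build and submit the Job exactly as in WordCountRunner above
    }
}

Setting the HADOOP_HOME environment variable (with winutils.exe in %HADOOP_HOME%\bin) achieves the same thing without touching the code.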
A similar run from IDEA on Windows (JDK 1.8.0_181, Hadoop 3.3.4 jars, main class fuirt_sale; the full java.exe launch command with its several-hundred-entry classpath is omitted here) logged the following:

2025-11-04 02:02:45,349 WARN [main] util.Shell (Shell.java:<clinit>(692)) - Did not find winutils.exe: {}
java.io.FileNotFoundException: java.io.FileNotFoundException: HADOOP_HOME and hadoop.home.dir are unset. -see https://wiki.apache.org/hadoop/WindowsProblems
    at org.apache.hadoop.util.Shell.fileNotFoundException(Shell.java:547)
    at org.apache.hadoop.util.Shell.getHadoopHomeDir(Shell.java:568)
    at org.apache.hadoop.util.Shell.getQualifiedBin(Shell.java:591)
    at org.apache.hadoop.util.Shell.<clinit>(Shell.java:688)
    at org.apache.hadoop.util.StringUtils.<clinit>(StringUtils.java:79)
    at org.apache.hadoop.conf.Configuration.getBoolean(Configuration.java:1712)
    at org.apache.hadoop.security.SecurityUtil.setConfigurationInternal(SecurityUtil.java:99)
    at org.apache.hadoop.security.SecurityUtil.<clinit>(SecurityUtil.java:88)
    at org.apache.hadoop.security.UserGroupInformation.initialize(UserGroupInformation.java:312)
    at org.apache.hadoop.security.UserGroupInformation.ensureInitialized(UserGroupInformation.java:300)
    at org.apache.hadoop.security.UserGroupInformation.getCurrentUser(UserGroupInformation.java:575)
    at org.apache.hadoop.mapreduce.task.JobContextImpl.<init>(JobContextImpl.java:72)
    at org.apache.hadoop.mapreduce.Job.<init>(Job.java:152)
    at org.apache.hadoop.mapreduce.Job.getInstance(Job.java:195)
    at org.apache.hadoop.mapreduce.Job.getInstance(Job.java:214)
    at fuirt_sale.main(fuirt_sale.java:18)
Caused by: java.io.FileNotFoundException: HADOOP_HOME and hadoop.home.dir are unset.
    at org.apache.hadoop.util.Shell.checkHadoopHomeInner(Shell.java:467)
    at org.apache.hadoop.util.Shell.checkHadoopHome(Shell.java:438)
    at org.apache.hadoop.util.Shell.<clinit>(Shell.java:515)
    ... 12 more
2025-11-04 02:02:50,887 WARN [main] impl.MetricsConfig (MetricsConfig.java:loadFirst(136)) - Cannot locate configuration: tried hadoop-metrics2-jobtracker.properties,hadoop-metrics2.properties
2025-11-04 02:02:50,917 INFO [main] impl.MetricsSystemImpl (MetricsSystemImpl.java:startTimer(378)) - Scheduled Metric snapshot period at 10 second(s).
2025-11-04 02:02:50,917 INFO [main] impl.MetricsSystemImpl (MetricsSystemImpl.java:start(191)) - JobTracker metrics system started
2025-11-04 02:02:51,257 WARN [main] mapreduce.JobResourceUploader (JobResourceUploader.java:uploadResourcesInternal(149)) - Hadoop command-line option parsing not performed. Implement the Tool interface and execute your application with ToolRunner to remedy this.
2025-11-04 02:02:51,271 WARN [main] mapreduce.JobResourceUploader (JobResourceUploader.java:uploadJobJar(482)) - No job jar file set. User classes may not be found. See Job or Job#setJar(String).
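
The first warning spells out its own fix: implement the Tool interface and launch through ToolRunner, which parses the generic Hadoop options (-D key=value, -fs, and so on) before your code runs. A minimal sketch of the driver rewritten that way (the class name and argument convention are mine, not from the original post):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

public class WordCountDriver extends Configured implements Tool {
    @Override
    public int run(String[] args) throws Exception {
        // getConf() already carries any -D/-fs options parsed by ToolRunner
        Job job = Job.getInstance(getConf());
        job.setJarByClass(WordCountDriver.class);
        job.setMapperClass(WordCountMapper.class);
        job.setReducerClass(WordCountReducer.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        FileInputFormat.setInputPaths(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        return job.waitForCompletion(true) ? 0 : 1;
    }

    public static void main(String[] args) throws Exception {
        // ToolRunner strips the generic options before handing args to run()
        System.exit(ToolRunner.run(new Configuration(), new WordCountDriver(), args));
    }
}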
Job submission then proceeds until HDFS rejects the write:

2025-11-04 02:02:51,296 INFO [main] input.FileInputFormat (FileInputFormat.java:listStatus(300)) - Total input files to process : 1
2025-11-04 02:02:51,337 INFO [main] mapreduce.JobSubmitter (JobSubmitter.java:submitJobInternal(202)) - number of splits:1
2025-11-04 02:02:51,386 INFO [main] mapreduce.JobSubmitter (JobSubmitter.java:printTokens(298)) - Submitting tokens for job: job_local1602603729_0001
2025-11-04 02:02:51,386 INFO [main] mapreduce.JobSubmitter (JobSubmitter.java:printTokens(299)) - Executing with tokens: []
2025-11-04 02:02:51,449 INFO [main] mapreduce.Job (Job.java:submit(1575)) - The url to track the job: http://localhost:8080/
2025-11-04 02:02:51,449 INFO [main] mapreduce.Job (Job.java:monitorAndPrintJob(1620)) - Running job: job_local1602603729_0001
2025-11-04 02:02:51,449 INFO [Thread-6] mapred.LocalJobRunner (LocalJobRunner.java:createOutputCommitter(501)) - OutputCommitter set in config null
2025-11-04 02:02:51,453 INFO [Thread-6] output.FileOutputCommitter (FileOutputCommitter.java:<init>(142)) - File Output Committer Algorithm version is 2
2025-11-04 02:02:51,453 INFO [Thread-6] output.FileOutputCommitter (FileOutputCommitter.java:<init>(157)) - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
2025-11-04 02:02:51,453 INFO [Thread-6] mapred.LocalJobRunner (LocalJobRunner.java:createOutputCommitter(519)) - OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
2025-11-04 02:02:51,467 WARN [Thread-6] mapred.LocalJobRunner (LocalJobRunner.java:run(590)) - job_local1602603729_0001
org.apache.hadoop.security.AccessControlException: Permission denied: user=zrs, access=WRITE, inode="/user/root/data2":root:supergroup:drwxr-xr-x
    at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.check(FSPermissionChecker.java:506)
    at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:346)
    at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermissionWithContext(FSPermissionChecker.java:370)
    at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:240)
    at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkPermission(FSDirectory.java:1943)
    at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkPermission(FSDirectory.java:1927)
    at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkAncestorAccess(FSDirectory.java:1886)
    at org.apache.hadoop.hdfs.server.namenode.FSDirMkdirOp.mkdirs(FSDirMkdirOp.java:60)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirs(FSNamesystem.java:3405)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.mkdirs(NameNodeRpcServer.java:1159)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.mkdirs(ClientNamenodeProtocolServerSideTranslatorPB.java:740)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:604)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:572)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:556)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1093)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1043)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:971)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1878)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2976)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:121)
    at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:88)
    at org.apache.hadoop.hdfs.DFSClient.primitiveMkdir(DFSClient.java:2509)
    at org.apache.hadoop.hdfs.DFSClient.mkdirs(DFSClient.java:2483)
    at org.apache.hadoop.hdfs.DistributedFileSystem$27.doCall(DistributedFileSystem.java:1485)
    at org.apache.hadoop.hdfs.DistributedFileSystem$27.doCall(DistributedFileSystem.java:1482)
    at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
    at org.apache.hadoop.hdfs.DistributedFileSystem.mkdirsInternal(DistributedFileSystem.java:1499)
    at org.apache.hadoop.hdfs.DistributedFileSystem.mkdirs(DistributedFileSystem.java:1474)
    at org.apache.hadoop.fs.FileSystem.mkdirs(FileSystem.java:2388)
    at org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter.setupJob(FileOutputCommitter.java:356)
    at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:541)
Caused by: org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.security.AccessControlException): Permission denied: user=zrs, access=WRITE, inode="/user/root/data2":root:supergroup:drwxr-xr-x
    at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.check(FSPermissionChecker.java:506)
    at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:346)
    at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermissionWithContext(FSPermissionChecker.java:370)
    at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:240)
    at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkPermission(FSDirectory.java:1943)
    at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkPermission(FSDirectory.java:1927)
    at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkAncestorAccess(FSDirectory.java:1886)
    at org.apache.hadoop.hdfs.server.namenode.FSDirMkdirOp.mkdirs(FSDirMkdirOp.java:60)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirs(FSNamesystem.java:3405)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.mkdirs(NameNodeRpcServer.java:1159)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.mkdirs(ClientNamenodeProtocolServerSideTranslatorPB.java:740)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:604)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:572)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:556)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1093)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1043)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:971)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1878)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2976)
    at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1612)
    at org.apache.hadoop.ipc.Client.call(Client.java:1558)
    at org.apache.hadoop.ipc.Client.call(Client.java:1455)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:242)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:129)
    at com.sun.proxy.$Proxy9.mkdirs(Unknown Source)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.mkdirs(ClientNamenodeProtocolTranslatorPB.java:674)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:422)
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:165)
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:157)
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:95)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:359)
    at com.sun.proxy.$Proxy10.mkdirs(Unknown Source)
    at org.apache.hadoop.hdfs.DFSClient.primitiveMkdir(DFSClient.java:2507)
    ... 9 more
2025-11-04 02:02:52,464 INFO [main] mapreduce.Job (Job.java:monitorAndPrintJob(1641)) - Job job_local1602603729_0001 running in uber mode : false
2025-11-04 02:02:52,466 INFO [main] mapreduce.Job (Job.java:monitorAndPrintJob(1648)) - map 0% reduce 0%
2025-11-04 02:02:52,469 INFO [main] mapreduce.Job (Job.java:monitorAndPrintJob(1661)) - Job job_local1602603729_0001 failed with state FAILED due to: NA
2025-11-04 02:02:52,483 INFO [main] mapreduce.Job (Job.java:monitorAndPrintJob(1666)) - Counters: 0
Process finished with exit code 1
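
The root cause of this final failure is HDFS permissions: the job runs as the local Windows user zrs, but the output parent /user/root/data2 is owned by root with mode drwxr-xr-x, so zrs has no write access. Two common remedies: relax the directory permissions as the owning user (for example, hdfs dfs -chmod or hdfs dfs -chown run as root), or have the client identify itself as the owning user. The latter is the widely used HADOOP_USER_NAME trick, sketched below; it is fine for a private sandbox, but it amounts to impersonation, so don't rely on it on a shared or secured cluster:

public class RunAsRoot {
    public static void main(String[] args) throws Exception {
        // UserGroupInformation picks the HDFS user from the HADOOP_USER_NAME
        // environment variable, falling back to this system property; it must
        // be set before Hadoop's security classes initialize.
        System.setProperty("HADOOP_USER_NAME", "root");
        // ... then build and submit the job as before
    }
}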