Common Node.js Modules: fs, os, path, repl, and util

This post surveys several commonly used Node.js core modules: fs, os, path, repl, and util. Working knowledge of these modules is a staple of back-end development with Node.

Other common modules in Node
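fs is Node's file system module, covering reading, writing, and inspecting files. Below is a minimal sketch using the promise-based fs/promises API; the file name demo.txt is a placeholder chosen for illustration:

```js
// fs: file I/O via the promise-based API (available since Node 14).
const fs = require('fs/promises');

async function fsDemo() {
  // demo.txt is a placeholder file name.
  await fs.writeFile('demo.txt', 'hello from fs\n', 'utf8');
  const text = await fs.readFile('demo.txt', 'utf8');
  console.log(text); // "hello from fs"

  const stats = await fs.stat('demo.txt');
  console.log(stats.size, 'bytes'); // file size reported by the OS
}

fsDemo().catch(console.error);
```

The callback-style fs API and the synchronous *Sync variants also exist; in modern code the promise API is usually the most convenient.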

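os reports information about the host system, and path assembles and parses file paths in a platform-aware way (forward slashes versus backslashes, and so on). A short sketch of both; the path components are arbitrary examples:

```js
// os: query the host system.
const os = require('os');
console.log(os.platform());    // e.g. 'linux', 'darwin', 'win32'
console.log(os.cpus().length); // number of logical CPU cores
console.log(os.totalmem());    // total system memory in bytes
console.log(os.homedir());     // current user's home directory

// path: build and take apart file paths portably.
const path = require('path');
const p = path.join(os.homedir(), 'projects', 'app.js');
console.log(p);                // separator matches the platform
console.log(path.basename(p)); // 'app.js'
console.log(path.extname(p));  // '.js'
console.log(path.dirname(p));  // everything before 'app.js'
```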
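util is a grab bag of helpers; util.promisify in particular wraps a callback-style function so that it returns a Promise, and util.inspect pretty-prints arbitrary objects. A sketch, with node --version as an arbitrary example command:

```js
// util: promisify a callback-style API and inspect objects.
const util = require('util');
const { exec } = require('child_process');

const execAsync = util.promisify(exec);

async function utilDemo() {
  // exec's promisified form resolves to { stdout, stderr }.
  const { stdout } = await execAsync('node --version');
  console.log(stdout.trim());

  // inspect renders nested objects fully instead of "[Object]".
  console.log(util.inspect({ nested: { a: { b: 1 } } }, { depth: null }));
}

utilDemo().catch(console.error);
```

repl powers the interactive shell you get by running node with no arguments, and it can also be embedded in your own tools. A minimal embed; the prompt string and the exposed greeting value are arbitrary:

```js
// repl: start an interactive read-eval-print loop programmatically.
const repl = require('repl');

const server = repl.start('demo> ');
// Values attached to server.context are visible inside the session.
server.context.greeting = 'hello from the host program';
```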