java.lang.NoSuchMethodError: org.apache.hadoop.mapred.TaskID.&lt;init&gt;(Lorg/apache/hadoop/mapreduce/JobID;Lorg/apache/hadoop/mapreduce/TaskType;I)V
	at org.apache.spark.rdd.HadoopRDD$.addLocalConfiguration(HadoopRDD.scala:384)
	at org.apache.spark.rdd.HadoopRDD$$anon$1.&lt;init&gt;(HadoopRDD.scala:246)
at org.apache.spark.rdd.HadoopRDD.compute(HadoopRDD.scala:211)
at org.apache.spark.rdd.HadoopRDD.compute(HadoopRDD.scala:102)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:323)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:287)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:323)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:287)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:323)
	at org.apache.spark.rdd.RDD$$anonfun$8.apply(RDD.scala:336)
	at org.apache.spark.rdd.RDD$$anonfun$8.apply(RDD.scala:334)
	at org.apache.spark.storage.BlockManager$$anonfun$doPutIterator$1.apply(BlockManager.scala:957)
	at org.apache.spark.storage.BlockManager$$anonfun$doPutIterator$1.apply(BlockManager.scala:948)
at org.apache.spark.storage.BlockManager.doPut(BlockManager.scala:888)
at org.apache.spark.storage.BlockManager.doPutIterator(BlockManager.scala:948)
at org.apache.spark.storage.BlockManager.getOrElseUpdate(BlockManager.scala:694)
at org.apache.spark.rdd.RDD.getOrCompute(RDD.scala:334)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:285)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:323)
	at org.apache.spark.rdd.RDD$$anonfun$8.apply(RDD.scala:336)
	at org.apache.spark.rdd.RDD$$anonfun$8.apply(RDD.scala:334)
	at org.apache.spark.storage.BlockManager$$anonfun$doPutIterator$1.apply(BlockManager.scala:957)
	at org.apache.spark.storage.BlockManager$$anonfun$doPutIterator$1.apply(BlockManager.scala:948)
at org.apache.spark.storage.BlockManager.doPut(BlockManager.scala:888)
at org.apache.spark.storage.BlockManager.doPutIterator(BlockManager.scala:948)
at org.apache.spark.storage.BlockManager.getOrElseUpdate(BlockManager.scala:694)
at org.apache.spark.rdd.RDD.getOrCompute(RDD.scala:334)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:285)
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
at org.apache.spark.scheduler.Task.run(Task.scala:99)
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:282)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
19/01/24 10:09:24 WARN scheduler.TaskSetManager: Lost task 0.0 in stage 0.0 (TID 0, localhost, executor driver): java.lang.NoSuchMethodError: org.apache.hadoop.mapred.TaskID.<init>(Lorg/apache/hadoop/mapreduce/JobID;Lorg/apache/hadoop/mapreduce/TaskType;I)V
	at org.apache.spark.rdd.HadoopRDD$.addLocalConfiguration(HadoopRDD.scala:384)
	at org.apache.spark.rdd.HadoopRDD$$anon$1.&lt;init&gt;(HadoopRDD.scala:246)
	...（其余调用栈与上方完整堆栈相同，直至 org.apache.spark.rdd.RDD$$anonfun$8.apply(RDD.scala:336)）
问题解决:
该错误由 Hadoop 的 jar 包版本冲突引起——Spark 编译时依赖的 Hadoop 版本与运行时 classpath 上实际加载的 Hadoop 版本不一致，导致 `TaskID` 缺少对应签名的构造方法。将 Spark 与集群使用的 Hadoop 依赖版本统一（或使用与集群 Hadoop 版本匹配的 Spark 发行包），移除 classpath 中多余的旧版 hadoop-*.jar 后即可解决。
java.lang.NoSuchMethodError: org.apache.hadoop.mapred.TaskID.<init>
最新推荐文章于 2024-06-03 09:52:44 发布