2018-11-21 11:46:41 WARN TaskSetManager:66 - Lost task 4.0 in stage 0.0 (TID 4, 192.168.1.140, executor 1): java.lang.NoSuchFieldError: NO_INTS
at com.fasterxml.jackson.dataformat.smile.SmileParserBase.&lt;init&gt;(SmileParserBase.java:185)
at com.fasterxml.jackson.dataformat.smile.SmileParser.&lt;init&gt;(SmileParser.java:128)
at com.fasterxml.jackson.dataformat.smile.SmileParserBootstrapper.constructParser(SmileParserBootstrapper.java:104)
at com.fasterxml.jackson.dataformat.smile.SmileFactory._createParser(SmileFactory.java:401)
at com.fasterxml.jackson.dataformat.smile.SmileFactory.createParser(SmileFactory.java:315)
at org.elasticsearch.common.xcontent.smile.SmileXContent.createParser(SmileXContent.java:87)
at org.elasticsearch.common.xcontent.XContentHelper.convertToMap(XContentHelper.java:140)
at org.elasticsearch.common.xcontent.XContentHelper.convertToMap(XContentHelper.java:114)
at org.elasticsearch.common.xcontent.XContentHelper.convertToMap(XContentHelper.java:92)
at org.elasticsearch.search.lookup.SourceLookup.sourceAsMapAndType(SourceLookup.java:89)
at org.elasticsearch.search.lookup.SourceLookup.sourceAsMap(SourceLookup.java:93)
at org.elasticsearch.search.SearchHit.getSourceAsMap(SearchHit.java:347)
at org.elasticsearch.search.SearchHit.sourceAsMap(SearchHit.java:333)
at org.elasticsearch.search.SearchHit.getSource(SearchHit.java:301)
at ... (application/user-code frame, truncated in the original paste)
at scala.collection.Iterator$class.foreach(Iterator.scala:893)
at scala.collection.AbstractIterator.foreach(Iterator.scala:1336)
at org.apache.spark.rdd.RDD$$anonfun$foreach$1$$anonfun$apply$28.apply(RDD.scala:921)
at org.apache.spark.rdd.RDD$$anonfun$foreach$1$$anonfun$apply$28.apply(RDD.scala:921)
at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:2074)
at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:2074)
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
at org.apache.spark.scheduler.Task.run(Task.scala:109)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:345)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
at java.lang.Thread.run(Thread.java:745)
出现该异常,是由于 Elasticsearch 与 Jackson 的版本不兼容造成的,需要根据所用的 Elasticsearch 版本下载与之匹配的 Jackson。我使用的是 Elasticsearch 5.6.2,对应的 Jackson 版本是 2.8.6。
小结:该异常是由 Elasticsearch 与 Jackson 版本不兼容引起的,解决方法是根据 Elasticsearch 版本更换匹配的 Jackson 依赖。
最新推荐文章于 2024-01-25 21:51:15 发布