Runtime exception:
Exception in thread "main" java.lang.NoSuchMethodError: io.netty.buffer.PooledByteBufAllocator.metric()Lio/netty/buffer/PooledByteBufAllocatorMetric;
    at org.apache.spark.network.util.NettyMemoryMetrics.registerMetrics(NettyMemoryMetrics.java:80)
    at org.apache.spark.network.util.NettyMemoryMetrics.<init>(NettyMemoryMetrics.java:76)
    at org.apache.spark.network.client.TransportClientFactory.<init>(TransportClientFactory.java:109)
    at org.apache.spark.network.TransportContext.createClientFactory(TransportContext.java:99)
    at org.apache.spark.rpc.netty.NettyRpcEnv.<init>(NettyRpcEnv.scala:71)
    at org.apache.spark.rpc.netty.NettyRpcEnvFactory.create(NettyRpcEnv.scala:461)
    at org.apache.spark.rpc.RpcEnv$.create(RpcEnv.scala:57)
    at org.apache.spark.SparkEnv$.create(SparkEnv.scala:249)
    at org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:175)
    at org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:256)
    at org.apache.spark.SparkContext.<init>(SparkContext.scala:423)
    at com.cyp.business.TopN10$.delayedEndpoint$com$cyp$business$TopN10$1(TopN10.scala:10)
    at com.cyp.business.TopN10$delayedInit$body.apply(TopN10.scala:6)
    at scala.Function0$class.apply$mcV$sp(Function0.scala:40)
    at scala.runtime.AbstractFunction0.apply$mcV$sp(AbstractFunction0.scala:12)
    at scala.App$$anonfun$main$1.apply(App.scala:76)
    at scala.App$$anonfun$main$1.apply(App.scala:76)
    at scala.collection.immutable.List.foreach(List.scala:383)
    at scala.collection.generic.TraversableForwarder$class.foreach(TraversableForwarder.scala:35)
    at scala.App$class.main(App.scala:76)
    at com.cyp.business.TopN10$.main(TopN10.scala:6)
    at com.cyp.business.TopN10.main(TopN10.scala)
Environment: Spark 2.3.0, HBase 1.2.0
Spark 2.3.0 upgraded Netty to 4.1.17:
[SPARK-19810] Remove support for Scala 2.10
[SPARK-22324] Upgrade Arrow to 0.8.0 and Netty to 4.1.17
Root cause: the netty-all-4.1.17.Final.jar that Spark 2.3.0 depends on conflicts with the netty-all-4.0.23.Final.jar that HBase 1.2.0 depends on. PooledByteBufAllocator.metric() and its return type PooledByteBufAllocatorMetric only exist in Netty 4.1; when the older 4.0.23 jar wins on the classpath, Spark's NettyMemoryMetrics calls a method that is not there and the JVM throws NoSuchMethodError.
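Assuming the project is built with Maven (the pom.xml fix below implies it), you can confirm that both Netty versions are being resolved before changing anything, using the standard includes filter of the maven-dependency-plugin:

mvn dependency:tree -Dincludes=io.netty

The output lists every path through which io.netty artifacts enter the build, which shows exactly which dependency drags in netty-all-4.0.23.Final.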
Solution:
Unify the netty-all version in pom.xml by declaring it as a direct dependency:
<dependency>
    <groupId>io.netty</groupId>
    <artifactId>netty-all</artifactId>
    <version>4.1.17.Final</version>
</dependency>
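This works because Maven's nearest-wins version mediation lets a direct declaration in your own pom override any transitive version. An alternative is to exclude the old Netty from the HBase dependency instead; a sketch, assuming the HBase artifact in use is hbase-client (adjust to whichever HBase artifacts your pom actually declares):

<dependency>
    <groupId>org.apache.hbase</groupId>
    <artifactId>hbase-client</artifactId>
    <version>1.2.0</version>
    <exclusions>
        <!-- Drop HBase's transitive netty-all 4.0.23.Final so Spark's 4.1.17.Final is the only Netty on the classpath -->
        <exclusion>
            <groupId>io.netty</groupId>
            <artifactId>netty-all</artifactId>
        </exclusion>
    </exclusions>
</dependency>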