Error while setting up a Flink 1.9.0 on YARN cluster

While setting up the latest Flink 1.9.0 on YARN recently, I ran into the following error.

2019-09-04 02:16:37,486 ERROR org.apache.flink.runtime.entrypoint.ClusterEntrypoint         - Could not start cluster entrypoint YarnSessionClusterEntrypoint.
org.apache.flink.runtime.entrypoint.ClusterEntrypointException: Failed to initialize the cluster entrypoint YarnSessionClusterEntrypoint.
	at org.apache.flink.runtime.entrypoint.ClusterEntrypoint.startCluster(ClusterEntrypoint.java:182)
	at org.apache.flink.runtime.entrypoint.ClusterEntrypoint.runClusterEntrypoint(ClusterEntrypoint.java:501)
	at org.apache.flink.yarn.entrypoint.YarnSessionClusterEntrypoint.main(YarnSessionClusterEntrypoint.java:93)
Caused by: java.lang.VerifyError: Uninitialized object exists on backward branch 209
Exception Details:
  Location:
    scala/collection/immutable/HashMap$HashTrieMap.split()Lscala/collection/immutable/Seq; @249: goto
  Reason:
    Error exists in the bytecode
  Bytecode:
    0000000: 2ab6 0064 04a0 001e b200 c1b2 00c6 04bd
    0000010: 0002 5903 2a53 c000 c8b6 00cc b600 d0c0
    0000020: 00d2 b02a b600 38b8 0042 3c1b 04a4 0156
    0000030: 1b05 6c3d 2a1b 056c 2ab6 0038 b700 d43e
    0000040: 2ab6 0038 021d 787e 3604 2ab6 0038 0210
    0000050: 201d 647c 7e36 05bb 0019 59b2 00c6 2ab6
    0000060: 003a c000 c8b6 00d8 b700 db1c b600 df3a
    0000070: 0619 06c6 001a 1906 b600 e3c0 008b 3a07
    0000080: 1906 b600 e6c0 008b 3a08 a700 0dbb 00e8
    0000090: 5919 06b7 00eb bf19 073a 0919 083a 0abb
    00000a0: 0002 5915 0419 09bb 0019 59b2 00c6 1909
    00000b0: c000 c8b6 00d8 b700 db03 b800 f13a 0e3a
    00000c0: 0d03 190d b900 f501 0019 0e3a 1136 1036
    00000d0: 0f15 0f15 109f 0027 150f 0460 1510 190d
    00000e0: 150f b900 f802 00c0 0005 3a17 1911 1917
    00000f0: b800 fc3a 1136 1036 0fa7 ffd8 1911 b801
    0000100: 00b7 0069 3a0b bb00 0259 1505 190a bb00
    0000110: 1959 b200 c619 0ac0 00c8 b600 d8b7 00db
    0000120: 03b8 00f1 3a13 3a12 0319 12b9 00f5 0100
    0000130: 1913 3a16 3615 3614 1514 1515 9f00 2715
    0000140: 1404 6015 1519 1215 14b9 00f8 0200 c000
    0000150: 053a 1819 1619 18b8 0103 3a16 3615 3614
    0000160: a7ff d819 16b8 0100 b700 693a 0cbb 0105
    0000170: 5919 0bbb 0105 5919 0cb2 010a b701 0db7
    0000180: 010d b02a b600 3a03 32b6 010f b0       
  Stackmap Table:
    same_frame(@35)
    full_frame(@141,{Object[#2],Integer,Integer,Integer,Integer,Integer,Object[#118]},{})
    append_frame(@151,Object[#139],Object[#139])
    full_frame(@209,{Object[#2],Integer,Integer,Integer,Integer,Integer,Object[#118],Object[#139],Object[#139],Object[#139],Object[#139],Top,Top,Object[#25],Object[#62],Integer,Integer,Object[#116]},{Uninitialized[#159],Uninitialized[#159],Integer,Object[#139]})
    full_frame(@252,{Object[#2],Integer,Integer,Integer,Integer,Integer,Object[#118],Object[#139],Object[#139],Object[#139],Object[#139],Top,Top,Object[#25],Object[#62],Integer,Integer,Object[#116]},{Uninitialized[#159],Uninitialized[#159],Integer,Object[#139]})
    full_frame(@312,{Object[#2],Integer,Integer,Integer,Integer,Integer,Object[#118],Object[#139],Object[#139],Object[#139],Object[#139],Object[#2],Top,Object[#25],Object[#62],Integer,Integer,Object[#116],Object[#25],Object[#62],Integer,Integer,Object[#116]},{Uninitialized[#262],Uninitialized[#262],Integer,Object[#139]})
    full_frame(@355,{Object[#2],Integer,Integer,Integer,Integer,Integer,Object[#118],Object[#139],Object[#139],Object[#139],Object[#139],Object[#2],Top,Object[#25],Object[#62],Integer,Integer,Object[#116],Object[#25],Object[#62],Integer,Integer,Object[#116]},{Uninitialized[#262],Uninitialized[#262],Integer,Object[#139]})
    full_frame(@387,{Object[#2],Integer},{})

	at scala.collection.immutable.HashMap$.scala$collection$immutable$HashMap$$makeHashTrieMap(HashMap.scala:181)
	at scala.collection.immutable.HashMap$HashMap1.updated0(HashMap.scala:216)
	at scala.collection.immutable.HashMap.updated(HashMap.scala:58)
	at scala.collection.immutable.Map$Map4.updated(Map.scala:224)
	at scala.collection.immutable.Map$Map4.$plus(Map.scala:225)
	at scala.collection.immutable.Map$Map4.$plus(Map.scala:197)
	at scala.collection.mutable.MapBuilder.$plus$eq(MapBuilder.scala:29)
	at scala.collection.mutable.MapBuilder.$plus$eq(MapBuilder.scala:25)
	at scala.collection.TraversableOnce.$anonfun$toMap$1(TraversableOnce.scala:316)
	at scala.collection.TraversableOnce$$Lambda$27/1787079037.apply(Unknown Source)
	at scala.collection.immutable.List.foreach(List.scala:388)
	at scala.collection.TraversableOnce.toMap(TraversableOnce.scala:315)
	at scala.collection.TraversableOnce.toMap$(TraversableOnce.scala:313)
	at scala.collection.AbstractTraversable.toMap(Traversable.scala:104)
	at scala.concurrent.duration.Duration$.<init>(Duration.scala:88)
	at scala.concurrent.duration.Duration$.<clinit>(Duration.scala)
	at scala.concurrent.duration.package$DurationInt$.durationIn$extension(package.scala:44)
	at scala.concurrent.duration.package$DurationInt.durationIn(package.scala:44)
	at scala.concurrent.duration.DurationConversions.seconds(DurationConversions.scala:33)
	at scala.concurrent.duration.DurationConversions.seconds$(DurationConversions.scala:33)
	at scala.concurrent.duration.package$DurationInt.seconds(package.scala:43)
	at org.apache.flink.runtime.akka.AkkaUtils$.<init>(AkkaUtils.scala:52)
	at org.apache.flink.runtime.akka.AkkaUtils$.<clinit>(AkkaUtils.scala)
	at org.apache.flink.runtime.akka.AkkaUtils.getFlinkActorSystemName(AkkaUtils.scala)
	at org.apache.flink.runtime.clusterframework.BootstrapTools.startActorSystem(BootstrapTools.java:114)
	at org.apache.flink.runtime.clusterframework.BootstrapTools.startActorSystem(BootstrapTools.java:87)
	at org.apache.flink.runtime.rpc.akka.AkkaRpcServiceUtils.createRpcService(AkkaRpcServiceUtils.java:84)
	at org.apache.flink.runtime.entrypoint.ClusterEntrypoint.createRpcService(ClusterEntrypoint.java:272)
	at org.apache.flink.runtime.entrypoint.ClusterEntrypoint.initializeServices(ClusterEntrypoint.java:248)
	at org.apache.flink.runtime.entrypoint.ClusterEntrypoint.runCluster(ClusterEntrypoint.java:202)
	at org.apache.flink.runtime.entrypoint.ClusterEntrypoint.lambda$startCluster$0(ClusterEntrypoint.java:164)
	at org.apache.flink.runtime.entrypoint.ClusterEntrypoint$$Lambda$21/1233990028.call(Unknown Source)
	at org.apache.flink.runtime.security.HadoopSecurityContext$$Lambda$22/2036127838.run(Unknown Source)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:422)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1754)
	at org.apache.flink.runtime.security.HadoopSecurityContext.runSecured(HadoopSecurityContext.java:41)
	at org.apache.flink.runtime.entrypoint.ClusterEntrypoint.startCluster(ClusterEntrypoint.java:163)
	... 2 more

After some Googling, I found the cause is a mismatch between Scala 2.12 and the JDK version: my JDK at the time was 1.8.0_11, while the required version is 1.8.0_111.
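Before swapping anything out, it is worth confirming which JDK Flink actually runs on, both on the submitting machine and on the YARN nodes, since the containers may pick up a different Java installation. The lines below are only a sketch of that check: env.java.home is a standard Flink configuration key in conf/flink-conf.yaml, but the host placeholder and the installation path are just examples that need to be adapted to your environment.

# JDK on the machine submitting the job
java -version

# JDK that YARN containers will use on each NodeManager host (hypothetical host name)
ssh <nodemanager-host> java -version

# Optionally pin Flink to a specific JDK via conf/flink-conf.yaml
# (example path, adjust to your installation):
env.java.home: /usr/java/jdk1.8.0_111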

However, even after switching the JDK version, the same error persisted.

I tried several different JDK versions, and none of them worked; every one produced the same error.

In the end, having run out of options, I downloaded the Flink distribution built against Scala 2.11 instead, and that finally worked: the cluster started up.
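For anyone who ends up taking the same route, the steps below are roughly what this boils down to. The download URL follows the Apache release archive layout for Flink 1.9.0, and the yarn-session.sh options are the usual session-mode flags; the memory sizes are only examples.

# Grab the Flink 1.9.0 binary built against Scala 2.11
wget https://archive.apache.org/dist/flink/flink-1.9.0/flink-1.9.0-bin-scala_2.11.tgz
tar -xzf flink-1.9.0-bin-scala_2.11.tgz
cd flink-1.9.0

# Flink 1.9 no longer bundles Hadoop, so expose the Hadoop classpath first
export HADOOP_CLASSPATH=$(hadoop classpath)

# Start a YARN session (example sizes: 1 GB JobManager, 2 GB per TaskManager)
./bin/yarn-session.sh -jm 1024m -tm 2048m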

If anything here is wrong, corrections are welcome. If you have questions, feel free to join QQ group 340297350; more hands-on Flink and Spark material is shared in my Knowledge Planet (知識星球) group.

 
