
When I run my Spark tests with sbt, I get this exception:

18:58:49.049 [sparkDriver-akka.actor.default-dispatcher-2] ERROR akka.actor.ActorSystemImpl - Uncaught fatal error from thread [sparkDriver-akka.remote.default-remote-dispatcher-5] shutting down ActorSystem [sparkDriver]
java.lang.VerifyError: (class: org/jboss/netty/channel/socket/nio/NioWorkerPool, method: createWorker signature: (Ljava/util/concurrent/Executor;)Lorg/jboss/netty/channel/socket/nio/AbstractNioWorker;) Wrong return type in function
at akka.remote.transport.netty.NettyTransport.<init>(NettyTransport.scala:283) ~[akka-remote_2.11-2.3.4-spark.jar:na]
at akka.remote.transport.netty.NettyTransport.<init>(NettyTransport.scala:240) ~[akka-remote_2.11-2.3.4-spark.jar:na]
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) ~[na:1.8.0_45]
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) ~[na:1.8.0_45]
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) ~[na:1.8.0_45]
at java.lang.reflect.Constructor.newInstance(Constructor.java:422) ~[na:1.8.0_45]
at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$2.apply(DynamicAccess.scala:78) ~[akka-actor_2.11-2.3.4-spark.jar:na]
at scala.util.Try$.apply(Try.scala:192) ~[scala-library-2.11.7.jar:0.13.8]
at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:73) ~[akka-actor_2.11-2.3.4-spark.jar:na]
at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:84) ~[akka-actor_2.11-2.3.4-spark.jar:na]
at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:84) ~[akka-actor_2.11-2.3.4-spark.jar:na]
at scala.util.Success.flatMap(Try.scala:231) ~[scala-library-2.11.7.jar:0.13.8]
at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:84) ~[akka-actor_2.11-2.3.4-spark.jar:na]
at akka.remote.EndpointManager$$anonfun$9.apply(Remoting.scala:692) ~[akka-remote_2.11-2.3.4-spark.jar:na]
at akka.remote.EndpointManager$$anonfun$9.apply(Remoting.scala:684) ~[akka-remote_2.11-2.3.4-spark.jar:na]
at scala.collection.TraversableLike$WithFilter$$anonfun$map$2.apply(TraversableLike.scala:728) ~[scala-library-2.11.7.jar:0.13.8]
at scala.collection.Iterator$class.foreach(Iterator.scala:742) ~[scala-library-2.11.7.jar:0.13.8]
at scala.collection.AbstractIterator.foreach(Iterator.scala:1194) ~[scala-library-2.11.7.jar:0.13.8]
at scala.collection.IterableLike$class.foreach(IterableLike.scala:72) ~[scala-library-2.11.7.jar:0.13.8]
at scala.collection.AbstractIterable.foreach(Iterable.scala:54) ~[scala-library-2.11.7.jar:0.13.8]
at scala.collection.TraversableLike$WithFilter.map(TraversableLike.scala:727) ~[scala-library-2.11.7.jar:0.13.8]
at akka.remote.EndpointManager.akka$remote$EndpointManager$$listens(Remoting.scala:684) ~[akka-remote_2.11-2.3.4-spark.jar:na]
at akka.remote.EndpointManager$$anonfun$receive$2.applyOrElse(Remoting.scala:492) ~[akka-remote_2.11-2.3.4-spark.jar:na]
at akka.actor.Actor$class.aroundReceive(Actor.scala:465) ~[akka-actor_2.11-2.3.4-spark.jar:na]
at akka.remote.EndpointManager.aroundReceive(Remoting.scala:395) ~[akka-remote_2.11-2.3.4-spark.jar:na]
at akka.actor.ActorCell.receiveMessage(ActorCell.scala:516) [akka-actor_2.11-2.3.4-spark.jar:na]
at akka.actor.ActorCell.invoke(ActorCell.scala:487) [akka-actor_2.11-2.3.4-spark.jar:na]
at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:238) [akka-actor_2.11-2.3.4-spark.jar:na]
at akka.dispatch.Mailbox.run(Mailbox.scala:220) [akka-actor_2.11-2.3.4-spark.jar:na]
at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:393) [akka-actor_2.11-2.3.4-spark.jar:na]
...

My first thought was that one of my sub-libraries was pulling in a wrong version of netty.

I inspected the dependency graph and found four different versions of netty: 3.6.6, 3.8.0, 3.9.3 and 4.0.23.
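For reference, this is roughly how the graph can be inspected with the sbt-dependency-graph plugin (the plugin coordinates, version and task names below are assumptions about my setup, not something shown in the build files here):

// project/plugins.sbt (assumed plugin version for sbt 0.13)
addSbtPlugin("net.virtual-void" % "sbt-dependency-graph" % "0.8.2")

// then, from the sbt shell:
//   dependencyTree                      -- prints the full tree, showing every netty version
//   whatDependsOn io.netty netty 3.8.0  -- shows which modules pull in a given version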

These versions are mostly pulled in by Spark itself: 4.0.23 is imported directly by Spark, and 3.8.0 by its sub-dependency Akka-remote.

I tried excluding the netty dependency from all sub-dependencies with excludeAll( ExclusionRule(organization = "io.netty")) and then adding each netty version back one by one, but that did not solve the problem. I also tried excluding com.typesafe.akka from all the dependencies that use it and switching to Akka-remote 2.4.0, but I ran into the same problem.

The netty version Akka-remote needs is 3.8.0, and forcing that version does not work either. I also forced my project to use the special Spark build of Akka (the -spark akka-remote artifact), which changed nothing.
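For the record, forcing a single netty version in sbt 0.13 would look roughly like the sketch below (the exact artifact and version to pin are assumptions):

// build.sbt: override every transitive io.netty:netty to one version (sbt 0.13 syntax)
// note: this cannot merge the legacy org.jboss.netty coordinates with io.netty,
// because sbt treats them as two different modules
dependencyOverrides += "io.netty" % "netty" % "3.8.0.Final"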

For information, here is my build.sbt:

libraryDependencies ++= Seq(
  "com.datastax.cassandra"  %   "cassandra-driver-core"               % "2.1.7.1" excludeAll( ExclusionRule(organization = "io.netty"), ExclusionRule(organization = "com.typesafe.akka")),
  "com.datastax.spark"      %%  "spark-cassandra-connector"           % "1.4.0" excludeAll( ExclusionRule(organization = "io.netty"), ExclusionRule(organization = "com.typesafe.akka")),
  "com.datastax.spark"      %%  "spark-cassandra-connector-embedded"  % "1.4.0"  % Test excludeAll( ExclusionRule(organization = "io.netty"), ExclusionRule(organization = "com.typesafe.akka")),
  "xxx.xxxxx"               %%  "shed"                                % "0.10.0-MOK-1848-DEBUG-SNAPSHOT" excludeAll( ExclusionRule(organization = "io.netty"), ExclusionRule(organization = "com.typesafe.akka")),
  "com.twitter"             %%  "util-collection"                     % "6.27.0" excludeAll( ExclusionRule(organization = "io.netty"), ExclusionRule(organization = "com.typesafe.akka")),
  "io.circe"                %%  "circe-core"                          % "0.2.0-SNAPSHOT" excludeAll( ExclusionRule(organization = "io.netty"), ExclusionRule(organization = "com.typesafe.akka")),
  "io.circe"                %%  "circe-generic"                       % "0.2.0-SNAPSHOT" excludeAll( ExclusionRule(organization = "io.netty"), ExclusionRule(organization = "com.typesafe.akka")),
  "io.circe"                %%  "circe-jawn"                          % "0.2.0-SNAPSHOT" excludeAll( ExclusionRule(organization = "io.netty"), ExclusionRule(organization = "com.typesafe.akka")),
  "io.netty"                %   "netty"                               % "3.6.6.Final",
  //"com.typesafe.akka"       %   "akka-remote_2.11"                    % "2.3.4",
  "org.apache.cassandra"    %   "cassandra-all"                       % "2.1.5" excludeAll( ExclusionRule(organization = "io.netty"), ExclusionRule(organization = "com.typesafe.akka")),
  "org.apache.cassandra"    %   "cassandra-thrift"                    % "2.0.5" excludeAll( ExclusionRule(organization = "io.netty"), ExclusionRule(organization = "com.typesafe.akka")),
  "org.apache.spark"        %%  "spark-streaming-kafka"               % "1.4.0" excludeAll( ExclusionRule(organization = "io.netty"), ExclusionRule(organization = "com.typesafe.akka")),
  "org.apache.spark"        %%  "spark-streaming"                     % sparkVersion % "provided" excludeAll( ExclusionRule(organization = "io.netty"), ExclusionRule(organization = "com.typesafe.akka")),
  "org.apache.spark"        %%  "spark-core"                          % sparkVersion % "provided" excludeAll( ExclusionRule(organization = "io.netty"), ExclusionRule(organization = "com.typesafe.akka")),
  "org.apache.spark"        %%  "spark-sql"                           % sparkVersion % "provided" excludeAll( ExclusionRule(organization = "io.netty"), ExclusionRule(organization = "com.typesafe.akka")),
  "org.scalaz.stream"       %   "scalaz-stream_2.11"                  % "0.7.3" % Test excludeAll( ExclusionRule(organization = "io.netty"), ExclusionRule(organization = "com.typesafe.akka")),
  "org.specs2"              %%  "specs2-core"                         % "3.6.1-scalaz-7.0.6" % "test" excludeAll( ExclusionRule(organization = "io.netty"), ExclusionRule(organization = "com.typesafe.akka"))
)

As you can see, I am trying to exclude as much as possible in these attempts.

The sub-project shed includes:

 "com.github.scopt"           %% "scopt"                % "3.3.0" ,
    "com.typesafe.akka"          %% "akka-testkit"         % "2.3.8"               % "test",
    "com.typesafe.play"          %% "play-test"            % "2.3.8"               % "test",
    "com.tinkerpop.gremlin"      %  "gremlin-java"         % gremlinVersion,
    "com.tinkerpop"              %  "pipes"                % gremlinVersion,
    "com.thinkaurelius.titan"    %  "titan-core"           % titanVersion,
    "com.thinkaurelius.titan"    %  "titan-cassandra"      % titanVersion,
    "com.thinkaurelius.titan"    %  "titan-berkeleyje"     % titanVersion,
    "com.netaporter"             %% "scala-uri"            % "0.4.8",
    "com.github.nscala-time"     %% "nscala-time"          % "1.8.0",
    "com.mandubian"              %% "play-json-zipper"     % "1.2",
    "com.michaelpollmeier"       %% "gremlin-scala"        % "2.6.1",
    "com.ansvia.graph"           %% "blueprints-scala"     % "0.1.61-20150416-SNAPSHOT",
    "io.kanaka"                  %% "play-monadic-actions" % "1.0.1" exclude("com.typesafe.play", "play_2.11"),
    "org.scalaz"                 %% "scalaz-concurrent"    % "7.0.6",
    "com.chuusai"                %% "shapeless"            % "2.3.0-SNAPSHOT",
    ("io.github.jto"             %% "validation-json"      % "1.0").exclude("org.tpolecat", "tut-core_2.11"),
    "org.parboiled"              %% "parboiled"            % "2.1.0",
    "com.typesafe.scala-logging" %% "scala-logging"        % "3.1.0",
    "ch.qos.logback"             %  "logback-classic"      % "1.1.2",
    "xxx.xxxxxxxxxx"             %% "chuck"                % "0.9.0-SNAPSHOT",
    "xxx.xxxxxxxxxx"             %% "shed-graph"           % "0.9.0-MOK-1848-SNAPSHOT" exclude("com.thinkaurelius.titan", "titan-core"),
    "io.circe"                   %% "circe-core"           % "0.2.0-SNAPSHOT",
    "io.circe"                   %% "circe-generic"        % "0.2.0-SNAPSHOT",
    "io.circe"                   %% "circe-jawn"           % "0.2.0-SNAPSHOT"

1 Answer


It's a trap!

The netty organization changed in the past, from org.jboss.netty to io.netty, but both contain the same packages.

exclude("org.jboss.netty", "netty") は私の問題を解決します。

Answered 2015-11-10T17:32:51.780