
Cannot run Apache Spark-related unit tests from SBT - NoClassDefFoundError

I have a simple unit test that uses a SparkContext. I can run the unit test without any problems from inside IntelliJ IDEA. However, when I try to run the same test from the SBT shell, it fails with the NoClassDefFoundError shown further below.
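For context, here is a minimal sketch of the kind of test involved (the class and names are placeholders, not my actual Merger/MergerTest code):

import org.apache.spark.{SparkConf, SparkContext}
import org.scalatest.{BeforeAndAfterAll, FunSuite}

// Placeholder test: create a local SparkContext, run a trivial job, stop the context.
class SparkSmokeTest extends FunSuite with BeforeAndAfterAll {

  private var sc: SparkContext = _

  override def beforeAll(): Unit = {
    val conf = new SparkConf().setMaster("local[2]").setAppName("spark-smoke-test")
    sc = new SparkContext(conf)   // creating the SparkContext is what blows up under SBT
  }

  override def afterAll(): Unit = {
    if (sc != null) sc.stop()
  }

  test("parallelize and sum") {
    assert(sc.parallelize(1 to 10).sum() === 55.0)
  }
}

This is how my build.scala looks: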

import sbt._ 
import Keys._ 
import org.scalatra.sbt._ 
import org.scalatra.sbt.PluginKeys._ 
import com.mojolly.scalate.ScalatePlugin._ 
import ScalateKeys._ 
import com.github.retronym.SbtOneJar 
import com.earldouglas.xsbtwebplugin.PluginKeys._ 
import com.earldouglas.xsbtwebplugin.WebPlugin._ 
import sbtassembly.Plugin._ 
import AssemblyKeys._ 
import sbtassembly.Plugin.AssemblyKeys._ 


object myservice extends Build { 
    val Organization = "com.company" 
    val Name = "myservice" 
    val Version = "0.1.0-SNAPSHOT" 
    val ScalaVersion = "2.10.4" 
    val ScalatraVersion = "2.3.0" 

    // settings for sbt-assembly plugin 
    val myAssemblySettings = assemblySettings ++ Seq(

    // handle conflicts during assembly task 
    mergeStrategy in assembly <<= (mergeStrategy in assembly) { 
     (old) => { 
     case PathList("javax", "servlet", xs @ _*)   => MergeStrategy.first 
     case PathList(ps @ _*) if ps.last endsWith ".html" => MergeStrategy.first 
     case "application.conf" => MergeStrategy.concat 
     case "unwanted.txt"  => MergeStrategy.discard 
     case x => old(x) 
     } 
    }, 

    // copy web resources to /webapp folder 
    resourceGenerators in Compile <+= (resourceManaged, baseDirectory) map { 
     (managedBase, base) => 
     val webappBase = base/"src"/"main"/"webapp" 
     for { 
      (from, to) <- webappBase ** "*" x rebase(webappBase, managedBase/"main"/"webapp") 
     } yield { 
      Sync.copy(from, to) 
      to 
     } 
    } 
) 

    lazy val project = Project (
    "myapplication", 
    file("."), 
    settings = Defaults.defaultSettings ++ ScalatraPlugin.scalatraWithJRebel ++ 
     SbtOneJar.oneJarSettings ++ 
     scalateSettings ++ Seq(
     organization := Organization, 
     name := Name, 
     version := Version, 
     scalaVersion := ScalaVersion, 
     port in container.Configuration := 8000, 
     resolvers ++= Seq (Classpaths.typesafeReleases, 
     "Typesafe Releases" at "http://repo.typesafe.com/typesafe/releases/", 
     "Sonatype OSS Snapshots" at "https://oss.sonatype.org/content/repositories/snapshots"), 

     libraryDependencies ++= Seq(
     "org.scala-lang" % "scala-reflect" % ScalaVersion, 
     "org.scala-lang" % "scala-compiler" % ScalaVersion, 

     "org.scalatest" %% "scalatest" % "2.2.1" % "test", 

     "org.scalaz" %% "scalaz-core" % "7.0.6", 

     "org.scalatra" %% "scalatra" % ScalatraVersion, 
     "org.scalatra" %% "scalatra-scalate" % ScalatraVersion, 
     "org.scalatra" %% "scalatra-specs2" % ScalatraVersion % "test", 
     "ch.qos.logback" % "logback-classic" % "1.0.6" % "runtime", 
     "commons-codec" % "commons-codec" % "1.8", 

     "org.apache.commons" % "commons-lang3" % "3.3.1", 

     "org.scalaj" %% "scalaj-http" % "0.3.16", 
     "net.liftweb" %% "lift-json" % "2.6-M4", 

     "org.eclipse.jetty" % "jetty-webapp" % "8.1.8.v20121106" % "container;compile", 
     "org.eclipse.jetty.orbit" % "javax.servlet" % "3.0.0.v201112011016" % "container;provided;test" artifacts (Artifact("javax.servlet", "jar", "jar")), 

     "com.amazonaws" % "aws-java-sdk" % "1.8.5", 

     "org.scala-lang.modules" %% "scala-async" % "0.9.1", 

     "com.typesafe" % "config" % "1.2.1", 
     "org.scalatra" %% "scalatra-auth" % "2.3.0", 

     // RDBMS-Mysql 
     "com.typesafe.slick" %% "slick" % "2.1.0", 
     "mysql" % "mysql-connector-java" % "latest.release", 
     "com.h2database" % "h2" % "1.3.175", 

     // Geolocation 
     "com.javadocmd" % "simplelatlng" % "1.3.0", 

     // Spark and Mllib 
     "org.apache.spark" %% "spark-core" % "1.1.0", 
     "org.apache.spark" %% "spark-mllib" % "1.1.0", 
     // Lucene 
     "org.apache.lucene" % "lucene-core" % "4.8.1", 
     // for Porter Stemmer 
     "org.apache.lucene" % "lucene-analyzers-common" % "4.8.1", 
     // Guava for the dictionary 
     "com.google.guava" % "guava" % "17.0", 

     // CSV lib 
     "com.github.tototoshi" %% "scala-csv" % "1.1.0-SNAPSHOT" 

    ), 
     scalateTemplateConfig in Compile <<= (sourceDirectory in Compile){ base => 
     Seq(
      TemplateConfig(
      base/"webapp"/"WEB-INF"/"templates", 
      Seq.empty, /* default imports should be added here */ 
      Seq(
       Binding("context", "_root_.org.scalatra.scalate.ScalatraRenderContext", importMembers = true, isImplicit = true) 
      ), /* add extra bindings here */ 
      Some("templates") 
     ) 
     ) 
     } 
    ) 
).settings(myAssemblySettings:_*) 
    .settings(parallelExecution in ThisBuild := false) 
} 

This is the error I get when running the test from the SBT shell:

java.lang.NoClassDefFoundError: org/eclipse/jetty/server/bio/SocketConnector 
    at org.apache.spark.HttpServer.org$apache$spark$HttpServer$$doStart(HttpServer.scala:74) 
    at org.apache.spark.HttpServer$$anonfun$1.apply(HttpServer.scala:60) 
    at org.apache.spark.HttpServer$$anonfun$1.apply(HttpServer.scala:60) 
    at org.apache.spark.util.Utils$$anonfun$startServiceOnPort$1.apply$mcVI$sp(Utils.scala:1446) 
    at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141) 
    at org.apache.spark.util.Utils$.startServiceOnPort(Utils.scala:1442) 
    at org.apache.spark.HttpServer.start(HttpServer.scala:60) 
    at org.apache.spark.HttpFileServer.initialize(HttpFileServer.scala:45) 
    at org.apache.spark.SparkEnv$.create(SparkEnv.scala:243) 
    at org.apache.spark.SparkContext.<init>(SparkContext.scala:203) 
    at org.apache.spark.SparkContext.<init>(SparkContext.scala:118) 
    at com.company.myservice.merging.Merger.<init>(Merger.scala:16) 
    at com.company.myservice.merging.MergerTest.<init>(MergerTest.scala:16) 
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) 
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57) 
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) 
    at java.lang.reflect.Constructor.newInstance(Constructor.java:526) 
    at java.lang.Class.newInstance(Class.java:374) 
    at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:641) 
    at sbt.TestRunner.runTest$1(TestFramework.scala:84) 
    at sbt.TestRunner.run(TestFramework.scala:94) 
    at sbt.TestFramework$$anon$2$$anonfun$$init$$1$$anonfun$apply$8.apply(TestFramework.scala:224) 
    at sbt.TestFramework$$anon$2$$anonfun$$init$$1$$anonfun$apply$8.apply(TestFramework.scala:224) 
    at sbt.TestFramework$.sbt$TestFramework$$withContextLoader(TestFramework.scala:212) 
    at sbt.TestFramework$$anon$2$$anonfun$$init$$1.apply(TestFramework.scala:224) 
    at sbt.TestFramework$$anon$2$$anonfun$$init$$1.apply(TestFramework.scala:224) 
    at sbt.TestFunction.apply(TestFramework.scala:229) 
    at sbt.Tests$.sbt$Tests$$processRunnable$1(Tests.scala:211) 
    at sbt.Tests$$anonfun$makeSerial$1.apply(Tests.scala:217) 
    at sbt.Tests$$anonfun$makeSerial$1.apply(Tests.scala:217) 
    at sbt.std.Transform$$anon$3$$anonfun$apply$2.apply(System.scala:45) 
    at sbt.std.Transform$$anon$3$$anonfun$apply$2.apply(System.scala:45) 
    at sbt.std.Transform$$anon$4.work(System.scala:64) 
    at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:237) 
    at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:237) 
    at sbt.ErrorHandling$.wideConvert(ErrorHandling.scala:18) 
    at sbt.Execute.work(Execute.scala:244) 
    at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:237) 
    at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:237) 
    at sbt.ConcurrentRestrictions$$anon$4$$anonfun$1.apply(ConcurrentRestrictions.scala:160) 
    at sbt.CompletionService$$anon$2.call(CompletionService.scala:30) 
    at java.util.concurrent.FutureTask.run(FutureTask.java:262) 
    at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471) 
    at java.util.concurrent.FutureTask.run(FutureTask.java:262) 
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145) 
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615) 
    at java.lang.Thread.run(Thread.java:745) 

I tried adding all of the following Jetty dependencies to my build.scala, without success:

"org.eclipse.jetty" % "jetty-http" % "9.2.1.v20140609" % "container;test", 
    "org.eclipse.jetty" % "jetty-plus" % "9.2.1.v20140609" % "container;test", 
    "org.eclipse.jetty" % "jetty-io" % "9.2.1.v20140609" % "container;test", 
    "org.eclipse.jetty" % "jetty-security" % "9.2.1.v20140609" % "container;test", 
    "org.eclipse.jetty" % "jetty-server" % "9.2.1.v20140609" % "container;test", 
    "org.eclipse.jetty" % "jetty-servlet" % "9.2.1.v20140609" % "container;test", 
    "org.eclipse.jetty" % "jetty-webapp" % "9.2.1.v20140609" % "container;test", 
    "org.eclipse.jetty" % "jetty-util" % "9.2.1.v20140609" % "container;test", 

I'm trying to create a test setup in IntelliJ using the spark-1.2.0 sources, but I'm stuck importing LocalSparkContext and friends. The jar that ships with the compiled sources (in my case spark-assembly-1.2.0-hadoop2.0.0-mr1-cdh4.2.0.jar) does not contain the test sources. Any help would be appreciated! –
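One commonly suggested way to get Spark's test helpers such as LocalSparkContext onto the test classpath is to depend on the published test-jar via a classifier instead of the assembly jar; a hedged sbt sketch (the Spark version here is an assumption):

// Sketch only: the "tests" classifier pulls in spark-core's published test
// artifact, which contains LocalSparkContext and related helpers.
libraryDependencies ++= Seq(
  "org.apache.spark" %% "spark-core" % "1.2.0",
  "org.apache.spark" %% "spark-core" % "1.2.0" % "test" classifier "tests"
)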

Answer


Jetty 9.2.1 does not contain org/eclipse/jetty/server/bio/SocketConnector.

It was removed entirely in Jetty 9.
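A quick way to confirm which Jetty actually ends up on the test classpath is to load the classes reflectively from that classpath; a small diagnostic sketch (object name is illustrative):

// Diagnostic sketch: print where the Jetty Server class is loaded from and
// probe for the class Spark 1.1.0 needs.
object JettyClasspathCheck extends App {
  val serverClass = Class.forName("org.eclipse.jetty.server.Server")
  // Prints the jar the class came from, e.g. .../jetty-server-9.2.1.v20140609.jar
  println(serverClass.getProtectionDomain.getCodeSource.getLocation)

  // On Jetty 9 this throws ClassNotFoundException, which is what surfaces as the
  // NoClassDefFoundError in the stack trace above.
  println(Class.forName("org.eclipse.jetty.server.bio.SocketConnector"))
}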

You also have mixed Jetty versions in your build:

"org.eclipse.jetty" % "jetty-webapp" % "8.1.8.v20121106" % "container;compile", 
"org.eclipse.jetty" % "jetty-webapp" % "9.2.1.v20140609" % "container;test", 

Either upgrade your testing library (Spark?) to a version that supports Jetty 9, or downgrade your Jetty version to 8.1.8.
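If you take the downgrade route, a sketch of what that could look like in sbt 0.13 (versions taken from the build above; dependencyOverrides forces the versions of transitively pulled modules):

// The build already declares jetty-webapp 8.1.8.v20121106; this keeps any Jetty
// modules pulled in transitively on the same 8.x line that still ships
// org.eclipse.jetty.server.bio.SocketConnector.
dependencyOverrides ++= Set(
  "org.eclipse.jetty" % "jetty-server"  % "8.1.8.v20121106",
  "org.eclipse.jetty" % "jetty-servlet" % "8.1.8.v20121106",
  "org.eclipse.jetty" % "jetty-util"    % "8.1.8.v20121106",
  "org.eclipse.jetty" % "jetty-webapp"  % "8.1.8.v20121106"
)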

For the record: the work in Jetty 9 to support SPDY/NPN/ALPN/HTTP2 meant that the entire connector architecture was overhauled. First, the blocking connectors were dropped, leaving only the NIO-based ones; then all of the connectors were merged into the new org.eclipse.jetty.server.ServerConnector in order to support the new protocol-negotiation steps. The ServerConnector carries the configuration that specifies encryption, protocols, the desired negotiation order, and fallbacks.
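For illustration only, this is roughly what the unified connector setup looks like in Jetty 9 (a minimal sketch, unrelated to the build above):

import org.eclipse.jetty.server.{HttpConfiguration, HttpConnectionFactory, Server, ServerConnector}

// Minimal Jetty 9 server: the single ServerConnector replaces the old blocking
// SocketConnector and the NIO SelectChannelConnector, and is configured with the
// connection factories (protocols) it should offer.
object Jetty9Example extends App {
  val server = new Server()
  val connector = new ServerConnector(server, new HttpConnectionFactory(new HttpConfiguration()))
  connector.setPort(8080)
  server.addConnector(connector)
  server.start()
  server.join()
}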

Note: Jetty 7 and Jetty 8 are approaching EOL (officially expected by the end of 2014).

See the announcement: http://dev.eclipse.org/mhonarc/lists/jetty-announce/msg00069.html


Downgrading to Jetty 7.x did the trick. –
