Unable to run code coverage using jacoco4sbt - sbt

I was following the jacoco4sbt wiki to run the JUnit test cases of my existing project using JaCoCo. I am getting the following exception when running the command below.
$ activator jacoco:cover
[info] Loading project definition from /data/test/project
[info] Set current project to test (in build file:/data/test/)
java.io.IOException: Error while instrumenting class Routes$$anonfun$routes$1.class.
at org.jacoco.core.instr.Instrumenter.instrumentError(Instrumenter.java:152)
at org.jacoco.core.instr.Instrumenter.instrument(Instrumenter.java:124)
at de.johoop.jacoco4sbt.Instrumentation$$anonfun$instrumentAction$3.apply(Instrumentation.scala:49)
at de.johoop.jacoco4sbt.Instrumentation$$anonfun$instrumentAction$3.apply(Instrumentation.scala:46)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47)
at scala.collection.TraversableLike$class.map(TraversableLike.scala:244)
at scala.collection.AbstractTraversable.map(Traversable.scala:105)
at de.johoop.jacoco4sbt.Instrumentation$class.instrumentAction(Instrumentation.scala:46)
at de.johoop.jacoco4sbt.JacocoPlugin$jacoco$.instrumentAction(JacocoPlugin.scala:59)
at de.johoop.jacoco4sbt.JacocoPlugin$SharedSettings$$anonfun$settings$5.apply(JacocoPlugin.scala:84)
at de.johoop.jacoco4sbt.JacocoPlugin$SharedSettings$$anonfun$settings$5.apply(JacocoPlugin.scala:84)
at scala.Function6$$anonfun$tupled$1.apply(Function6.scala:35)
at scala.Function6$$anonfun$tupled$1.apply(Function6.scala:34)
at scala.Function1$$anonfun$compose$1.apply(Function1.scala:47)
at sbt.$tilde$greater$$anonfun$$u2219$1.apply(TypeFunctions.scala:42)
at sbt.std.Transform$$anon$4.work(System.scala:64)
at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:237)
at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:237)
at sbt.ErrorHandling$.wideConvert(ErrorHandling.scala:18)
at sbt.Execute.work(Execute.scala:244)
at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:237)
at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:237)
at sbt.ConcurrentRestrictions$$anon$4$$anonfun$1.apply(ConcurrentRestrictions.scala:160)
at sbt.CompletionService$$anon$2.call(CompletionService.scala:30)
at java.util.concurrent.FutureTask.run(FutureTask.java:262)
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
at java.util.concurrent.FutureTask.run(FutureTask.java:262)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:745)
Caused by: java.lang.RuntimeException: java.lang.ClassNotFoundException: play.api.mvc.Handler
at org.objectweb.asm.ClassWriter.getCommonSuperClass(ClassWriter.java:1684)
at org.objectweb.asm.ClassWriter.getMergedType(ClassWriter.java:1654)
at org.objectweb.asm.Frame.merge(Frame.java:1426)
at org.objectweb.asm.Frame.merge(Frame.java:1325)
at org.objectweb.asm.MethodWriter.visitMaxs(MethodWriter.java:1475)
at org.objectweb.asm.ClassReader.readCode(ClassReader.java:1554)
at org.objectweb.asm.ClassReader.readMethod(ClassReader.java:1017)
at org.objectweb.asm.ClassReader.accept(ClassReader.java:693)
at org.objectweb.asm.ClassReader.accept(ClassReader.java:506)
at org.objectweb.asm.ClassWriter.toByteArray(ClassWriter.java:995)
at org.jacoco.core.instr.Instrumenter.instrument(Instrumenter.java:84)
at org.jacoco.core.instr.Instrumenter.instrument(Instrumenter.java:122)
at de.johoop.jacoco4sbt.Instrumentation$$anonfun$instrumentAction$3.apply(Instrumentation.scala:49)
at de.johoop.jacoco4sbt.Instrumentation$$anonfun$instrumentAction$3.apply(Instrumentation.scala:46)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47)
at scala.collection.TraversableLike$class.map(TraversableLike.scala:244)
at scala.collection.AbstractTraversable.map(Traversable.scala:105)
at de.johoop.jacoco4sbt.Instrumentation$class.instrumentAction(Instrumentation.scala:46)
at de.johoop.jacoco4sbt.JacocoPlugin$jacoco$.instrumentAction(JacocoPlugin.scala:59)
at de.johoop.jacoco4sbt.JacocoPlugin$SharedSettings$$anonfun$settings$5.apply(JacocoPlugin.scala:84)
at de.johoop.jacoco4sbt.JacocoPlugin$SharedSettings$$anonfun$settings$5.apply(JacocoPlugin.scala:84)
at scala.Function6$$anonfun$tupled$1.apply(Function6.scala:35)
at scala.Function6$$anonfun$tupled$1.apply(Function6.scala:34)
at scala.Function1$$anonfun$compose$1.apply(Function1.scala:47)
at sbt.$tilde$greater$$anonfun$$u2219$1.apply(TypeFunctions.scala:42)
at sbt.std.Transform$$anon$4.work(System.scala:64)
at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:237)
at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:237)
at sbt.ErrorHandling$.wideConvert(ErrorHandling.scala:18)
at sbt.Execute.work(Execute.scala:244)
at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:237)
at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:237)
at sbt.ConcurrentRestrictions$$anon$4$$anonfun$1.apply(ConcurrentRestrictions.scala:160)
at sbt.CompletionService$$anon$2.call(CompletionService.scala:30)
at java.util.concurrent.FutureTask.run(FutureTask.java:262)
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
at java.util.concurrent.FutureTask.run(FutureTask.java:262)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:745)
[error] (jacoco:fullClasspath) java.io.IOException: Error while instrumenting class Routes$$anonfun$routes$1.class.
[error] Total time: 5 s, completed Feb 5, 2015 10:36:58 PM
Please see my build.sbt below:
name := "test"
version := "1.0-SNAPSHOT"
playJavaSettings
ebeanEnabled := false
jacoco.settings
playJavaSettings ++ QueryDSLPlugin.queryDSLSettings
val current = project.in(file(".")).configs(QueryDSLPlugin.QueryDSL)
libraryDependencies ++= Seq(
javaCore,
cache,
javaJpa,
javaJpa.exclude("org.hibernate.javax.persistence", "hibernate-jpa-2.0-api"),
"org.apache.axis" % "axis" % "1.4",
"org.apache.axis" % "axis-jaxrpc" % "1.4",
"commons-discovery" % "commons-discovery" % "0.5",
"wsdl4j" % "wsdl4j" % "1.6.2",
"org.springframework" % "spring-context" % "4.1.0.RELEASE",
"javax.inject" % "javax.inject" % "1",
"org.springframework.data" % "spring-data-jpa" % "1.7.0.RELEASE",
"org.springframework" % "spring-expression" % "4.1.0.RELEASE",
"org.hibernate" % "hibernate-entitymanager" % "4.3.7.Final",
"org.hibernate.javax.persistence" % "hibernate-jpa-2.1-api" % "1.0.0.Final",
"org.apache.httpcomponents" % "httpclient" % "4.3.5",
"org.json" % "json" % "20090211",
"org.jasypt" % "jasypt" % "1.9.0",
"org.mockito" % "mockito-core" % "1.9.5" % "test",
"com.github.jknack" % "handlebars" % "1.3.2",
"org.springframework" % "spring-context-support" % "3.2.2.RELEASE",
"javax.mail" % "mail" % "1.5.0-b01",
"com.typesafe.akka" % "akka-kernel_2.10" % "2.1.0",
"org.quartz-scheduler" % "quartz" % "2.2.1",
"org.quartz-scheduler" % "quartz-jobs" % "2.2.1",
"com.datastax.cassandra" % "cassandra-driver-core" % "2.1.1",
"com.datastax.cassandra" % "cassandra-driver-mapping" % "2.1.1",
"junit" % "junit" % "4.11",
"com.amazonaws" % "aws-java-sdk" % "1.9.6",
"com.twilio.sdk" % "twilio-java-sdk" % "3.4.5",
"com.atlassian.jira" % "jira-rest-java-client-core" % "2.0.0-m25",
"com.atlassian.jira" % "jira-rest-java-client-api" % "2.0.0-m25",
"commons-io" % "commons-io" % "2.2",
"org.apache.commons" % "commons-csv" % "1.0",
"com.jcraft" % "jsch" % "0.1.50",
"net.sf.opencsv" % "opencsv" % "2.3",
"com.google.guava" % "guava" % "18.0"
)
and my project/plugins.sbt:
// Comment to get more information during initialization
logLevel := Level.Warn
// The Typesafe repository
resolvers += "Typesafe repository" at "http://repo.typesafe.com/typesafe/releases/"
// Use the Play sbt plugin for Play projects
addSbtPlugin("com.typesafe.play" % "sbt-plugin" % "2.2.0")
addSbtPlugin("com.code-troopers.play" % "play-querydsl" % "0.1.1")
addSbtPlugin("de.johoop" % "jacoco4sbt" % "2.1.6")
I am able to see the available plugins using the following command:
$ activator about
[info] Loading project definition from /data/test/project
[info] Set current project to test (in build file:/data/test/)
[info] This is sbt 0.13.0
[info] The current project is {file:/data/test/}current 1.0-SNAPSHOT
[info] The current project is built against Scala 2.10.2
[info] Available Plugins: play.Project, com.typesafe.sbteclipse.plugin.EclipsePlugin, org.sbtidea.SbtIdeaPlugin, com.typesafe.sbt.SbtNativePackager, QueryDSLPlugin, de.johoop.jacoco4sbt.JacocoPlugin
[info] sbt, sbt plugins, and build definitions are using Scala 2.10.
Also, I am able to run the test cases using the command below:
$ activator test
Am I missing any configuration for jacoco4sbt?

Related

How to add dependent project in build.sbt for running sbt compile

I am new to sbt builds.
I would like to add the Java files of a dependent project (say Proj A) to my compiling project (Proj B).
Running sbt compile in Proj B throws an error that the dependent project's Java packages/classes are not found.
I went through the link https://www.scala-sbt.org/0.13/docs/Multi-Project.html, but it's not clear to me how to add this dependency to make it work.
I tried adding the line below in build.sbt, but it didn't work.
lazy val projB = project.dependsOn(/projA)
Updated
build.sbt of projB:
organization := "com.org"
name := "projB"
version := "1"
resolvers ++= Seq(
"Typesafe" at "http://repo.typesafe.com/typesafe/releases/",
"Java.net Maven2 Repository" at "http://download.java.net/maven/2/",
)
lazy val projB = project.dependsOn(projA)
// the library dependencies of springframework here
build.sbt of Proj A:
organization := "com.org"
name := "proj A"
version := "1"
resolvers ++= Seq(
"Typesafe" at "http://repo.typesafe.com/typesafe/releases/",
"Java.net Maven2 Repository" at "http://download.java.net/maven/2/",
)
// the library dependencies of springframework here
When I do sbt compile on Proj B, it throws an error that the dependent classes are not found. Class Hbase is in Proj A.
[error] import com.org.config.Hbase;
[error] **\hbase\HbaseDAO.java:38:1: cannot find symbol
[error] symbol: class Hbase
[error] location: class com.org.hbase.HbaseDAO
[error] private Hbase hbase;
[error] (Compile / compileIncremental) javac returned non-zero exit code
[error] Total time: 6 s, completed 29/08/2019 9:58:39 AM
Updated build.sbt after the suggestion:
inThisBuild(
Seq(
organization := "com.org",
version := "1",
resolvers ++= Seq(
"Typesafe" at "http://repo.typesafe.com/typesafe/releases/",
"Java.net Maven2 Repository" at "http://download.java.net/maven/2/",
)
)
)
lazy val root = project
.in(file("."))
.aggregate(projA,projB)
lazy val projA = project.settings(
// project A settings and library dependencies
libraryDependencies += "org.springframework.boot" % "spring-boot-starter-
parent" % "2.1.6.RELEASE" pomOnly()
libraryDependencies += "org.springframework.boot" % "spring-boot-starter-
web" % "2.1.6.RELEASE"
libraryDependencies += "org.springframework.data" % "spring-data-hadoop-
hbase" % "2.3.0.RELEASE"
libraryDependencies += "org.mortbay.jetty" % "jetty" % "7.0.0.pre5"
libraryDependencies += "io.netty" % "netty-all" % "5.0.0.Alpha2"
libraryDependencies += "commons-beanutils" % "commons-beanutils" % "1.9.4"
libraryDependencies += "commons-beanutils" % "commons-beanutils-core" %
"1.8.3"
libraryDependencies += "xerces" % "xercesImpl" % "2.12.0"
libraryDependencies += "org.apache.hadoop" % "hadoop-yarn-server-
nodemanager" % "3.2.0"
libraryDependencies += "org.apache.hadoop" % "hadoop-common" % "3.2.0"
libraryDependencies += "org.apache.hadoop" % "hadoop-common" % "2.7.0"
libraryDependencies += "org.apache.hadoop" % "hadoop-client" % "3.2.0"
libraryDependencies += "org.apache.hbase" % "hbase-client" % "2.1.1"
libraryDependencies += "org.apache.hbase" % "hbase" % "2.1.1" pomOnly()
libraryDependencies += "org.apache.hbase" % "hbase-common" % "2.1.1"
)
lazy val projB = project
.dependsOn(projA)
.settings(
// project B settings and library dependencies
libraryDependencies += "org.springframework.boot" % "spring-boot-starter-
parent" % "2.1.6.RELEASE" pomOnly()
libraryDependencies += "org.springframework.boot" % "spring-boot-starter-
web" % "2.1.6.RELEASE"
libraryDependencies += "org.springframework.data" % "spring-data-hadoop-
hbase" % "2.3.0.RELEASE"
libraryDependencies += "org.mortbay.jetty" % "jetty" % "7.0.0.pre5"
libraryDependencies += "io.netty" % "netty-all" % "5.0.0.Alpha2"
libraryDependencies += "commons-beanutils" % "commons-beanutils" % "1.9.4"
libraryDependencies += "commons-beanutils" % "commons-beanutils-core" %
"1.8.3"
libraryDependencies += "xerces" % "xercesImpl" % "2.12.0"
libraryDependencies += "org.apache.hadoop" % "hadoop-yarn-server-
nodemanager" % "3.2.0"
libraryDependencies += "com.fasterxml.jackson.core" % "jackson-databind" %
"2.10.0.pr2"
libraryDependencies += "org.apache.hadoop" % "hadoop-common" % "3.2.0"
libraryDependencies += "org.apache.hadoop" % "hadoop-client" % "3.2.0"
libraryDependencies += "org.apache.hbase" % "hbase-client" % "2.1.1"
libraryDependencies += "org.apache.hbase" % "hbase" % "2.1.1" pomOnly()
libraryDependencies += "org.apache.hbase" % "hbase-common" % "2.1.1"
)
An error is thrown during sbt compile after adding the library dependency below to both project settings (projA and projB):
libraryDependencies += "org.springframework.boot" % "spring-boot-starter-web" % "2.1.6.RELEASE"
')' expected but string literal found is thrown for this line in the projA settings, and
';' expected but string literal found is thrown for this line in the projB settings.
I couldn't get much of a clue from this error.
Looking at the two snippets you posted, I'm guessing that you have two separate build.sbt files, one for each subproject. This makes them independent and one project just doesn't see the other. While it may be possible to have multiple build.sbt files for the subprojects, it's recommended to define the whole multiproject build in a single build.sbt file in the root of the project.
For example, if you structure your project like this:
├── project
│ ├── build.properties
│ └── plugins.sbt
├── projA
│ └── src
├── projB
│ └── src
└── build.sbt
Then you can put all the build settings and subproject relations in the root build.sbt:
inThisBuild(
Seq(
organization := "com.org",
version := "1",
resolvers ++= Seq(
"Typesafe" at "http://repo.typesafe.com/typesafe/releases/",
"Java.net Maven2 Repository" at "http://download.java.net/maven/2/",
)
)
)
lazy val root = project
.in(file("."))
.aggregate(projA, projB)
lazy val projA = project
.settings(
// project A settings and library dependencies
)
lazy val projB = project
.dependsOn(projA)
.settings(
// project B settings and library dependencies
)
Then if you launch an sbt shell from the root of the project, you can call compile (or any other task) to compile both projA and projB, or you can call projA/compile to compile that subproject specifically.
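As a side note on the follow-up parse errors from the updated build.sbt: everything inside .settings(...) is a list of arguments to a single method call, so consecutive settings must be separated by commas. That is likely what triggers the "')' expected" and "';' expected" messages. A minimal sketch of the corrected shape, reusing a couple of the dependencies from the update:
lazy val projB = project
  .dependsOn(projA)
  .settings(
    // each setting is one argument of .settings(...), hence the commas
    libraryDependencies += "org.springframework.boot" % "spring-boot-starter-web" % "2.1.6.RELEASE",
    libraryDependencies += "org.apache.hbase" % "hbase-client" % "2.1.1",
    libraryDependencies += "org.apache.hbase" % "hbase-common" % "2.1.1"
  )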
You are already reading the documentation, so you know where to find more information. Notice that the link you provided points to the old documentation; at the top there is a banner pointing to the new page: https://www.scala-sbt.org/1.x/docs/Multi-Project.html

sbt assembly akka-stream_2.12:2.4.19 and akka-actor_2.12:2.4.19 are evicted

I want to make a fat JAR of my project with sbt-assembly. Here is my build.sbt:
name := "projName"
version := "1.0"
scalaVersion := "2.12.1"
libraryDependencies ++= Seq(
"com.typesafe.akka" %% "akka-stream" % "2.5.3",
"com.typesafe.akka" %% "akka-actor" % "2.5.3",
"com.typesafe.akka" %% "akka-http" % "10.0.8",
"com.fasterxml.jackson.module" %% "jackson-module-scala" % "2.8.6",
"com.typesafe" % "config" % "1.3.1",
"org.apache.kafka" % "kafka-clients" % "0.10.2.1",
"org.knowm.xchange" % "xchange-core" % "4.2.0",
"org.knowm.xchange" % "xchange-btce" % "4.2.0"
)
assemblyMergeStrategy in assembly := {
case PathList("META-INF", xs # _*) => MergeStrategy.discard
case x => MergeStrategy.first
}
After running
sbt assembly
I got this warning:
[warn] There may be incompatibilities among your library dependencies.
[warn] Here are some of the libraries that were evicted:
[warn] * com.typesafe.akka:akka-stream_2.12:2.4.19 -> 2.5.3
[warn] * com.typesafe.akka:akka-actor_2.12:2.4.19 -> 2.5.3
[warn] Run 'evicted' to see detailed eviction warnings
Then I ran sbt evicted for more detail, and here is what I got:
[warn] There may be incompatibilities among your library dependencies.
[warn] Here are some of the libraries that were evicted:
[warn] * com.typesafe.akka:akka-stream_2.12:2.4.19 -> 2.5.3 (caller: com.typesafe.akka:akka-http-core_2.12:10.0.8, default:btceclient_2.12:1.0)
[warn] * com.typesafe.akka:akka-actor_2.12:2.4.19 -> 2.5.3 (caller: com.typesafe.akka:akka-stream_2.12:2.5.3, com.typesafe.akka:akka-parsing_2.12:10.0.8, default:btceclient_2.12:1.0)
[info] Here are other libraries that were evicted:
[info] * com.fasterxml.jackson.core:jackson-databind:2.8.2 -> 2.8.6 (caller: com.fasterxml.jackson.module:jackson-module-paranamer:2.8.6, com.github.mmazi:rescu:1.9.0, com.fasterxml.jackson.module:jackson-module-scala_2.12:2.8.6)
[info] * com.fasterxml.jackson.core:jackson-annotations:2.8.0 -> 2.8.6 (caller: com.fasterxml.jackson.core:jackson-databind:2.8.6, com.fasterxml.jackson.module:jackson-module-scala_2.12:2.8.6)
[info] * com.typesafe:config:1.2.0 -> 1.3.1 (caller: com.typesafe:ssl-config-core_2.12:0.2.1, default:btceclient_2.12:1.0, com.typesafe.akka:akka-actor_2.12:2.5.3)
I know what eviction warnings are, but I don't know how to resolve them in this situation. Any ideas?
Modified merge strategy:
assemblyMergeStrategy in assembly := {
case PathList("META-INF", xs # _*) => MergeStrategy.discard
case PathList("reference.conf") => MergeStrategy.concat
case x => MergeStrategy.first
}
Here you can see the new line: case PathList("reference.conf") => MergeStrategy.concat.
It concatenates the reference.conf files from all dependencies instead of keeping just one, so the default Akka configuration shipped in each module survives in the fat JAR.
You can safely ignore this message, because the 2.4 and 2.5 versions are binary compatible, so it is okay to upgrade from 2.4 to 2.5.
The eviction message is clear: a quick search in Maven for akka-http_2.12:10.0.8 will show you that this artifact depends on akka-stream_2.12:2.4.19.
One solution: you could downgrade akka-actor and akka-stream to 2.4.19 in the libraryDependencies setting. Another is to pin the newer versions explicitly; see the sketch below.
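A minimal sketch of the pinning approach (dependencyOverrides is a standard sbt key; whether it fully silences the warning depends on your sbt version, so treat this as a hint rather than a guaranteed fix):
// pin the Akka modules so the resolver no longer has to evict 2.4.19
dependencyOverrides += "com.typesafe.akka" %% "akka-actor" % "2.5.3"
dependencyOverrides += "com.typesafe.akka" %% "akka-stream" % "2.5.3"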

How can I add process parameters using sbt-native-packager?

How can I add process parameters using the sbt-native-packager configuration? I want to add options that redirect the process stderr to a file, to get a result like this:
sudo -u app bash -c "app >>/var/log/app/stderr.log 2>&1"
I use sbt-native-packager 1.2.0-M5 to build a deb package with JavaServerAppPackaging, JDebPackaging, SystemdPlugin and UpstartPlugin. The exception ends up only in stderr, not in the logs. Also, I must delete the app's pid file manually after a crash; if it still exists, I get an error in stderr.
My plugins.sbt:
resolvers += Resolver.bintrayRepo("sbt", "sbt-plugin-releases")
// The Play plugin
addSbtPlugin("com.typesafe.play" % "sbt-plugin" % "2.5.8-netty-4.1")
addSbtPlugin("com.typesafe.sbt" % "sbt-native-packager" % "1.2.0-M5")
addSbtPlugin("net.virtual-void" % "sbt-dependency-graph" % "0.8.2")
addSbtPlugin("com.lightbend.sbt" % "sbt-javaagent" % "0.1.1")
libraryDependencies += "org.vafer" % "jdeb" % "1.3" artifacts (Artifact("jdeb", "jar", "jar"))
My build.sbt:
...
debianPackageDependencies in Debian ++= Seq("postgresql-9.5 (>= 9.5.1)")
lazy val root = (project in file(".")).enablePlugins(PlayScala, JavaAgent)
scalaVersion := "2.11.8"
val akkaVersion = "2.4.10"
libraryDependencies ++= Seq(
"org.postgresql" % "postgresql" % "9.4.1208",
"org.scalikejdbc" %% "scalikejdbc" % "2.4.0",
"org.scalikejdbc" %% "scalikejdbc-config" % "2.4.0",
"org.scalikejdbc" %% "scalikejdbc-play-initializer" % "2.5.1",
"org.flywaydb" %% "flyway-play" % "3.0.1",
"com.typesafe.akka" %% "akka-contrib" % akkaVersion,
"com.typesafe.akka" %% "akka-slf4j" % akkaVersion,
"io.dropwizard.metrics" % "metrics-core" % "3.1.2",
"io.dropwizard.metrics" % "metrics-jvm" % "3.1.2",
"org.coursera" % "dropwizard-metrics-datadog" % "1.1.4",
"com.typesafe.akka" %% "akka-testkit" % akkaVersion % Test,
"com.relayrides" % "pushy" % "0.8",
"com.relayrides" % "pushy-dropwizard-metrics-listener" % "0.8",
"org.eclipse.jetty.alpn" % "alpn-api" % "1.1.3.v20160715" % "runtime",
ws,
specs2 % Test
)
resolvers += "Typesafe Releases" at "http://repo.typesafe.com/typesafe/maven-releases/"
resolvers += Resolver.mavenLocal
routesGenerator := InjectedRoutesGenerator
javaOptions in Test ++= Seq("-Dlogger.resource=logback-test.xml")
scalacOptions in Universal ++= Seq("-unchecked", "-deprecation", "-notailcalls")
javaOptions in Universal ++= Seq(
"-J-server",
...
)
...
import com.typesafe.sbt.packager.archetypes.systemloader._
// UpstartPlugin for ubuntu 14.04, SystemdPlugin for ubuntu 16.04
enablePlugins(JavaServerAppPackaging, JDebPackaging, SystemdPlugin, UpstartPlugin)
requiredStartFacilities := Some("datadog-agent.service, systemd-journald.service, postgresql.service")
javaAgents += "org.mortbay.jetty.alpn" % "jetty-alpn-agent" % "2.0.4" % "dist"
P.S. I found a workaround: on Ubuntu 16.04 I can use journald to collect all the logs in the system.
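For reference, a hedged example of reading those logs through journald (the unit name app.service is an assumption based on the app name used above):
journalctl -u app.service -f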
Thanks for updating the question with all relevant information. There are a couple of things here.
Only one Systemloader plugin
You enabled SystemdPlugin and UpstartPlugin. If this works, it only works by accident: no version of native-packager was designed to support multiple systemloaders for a single package type in a single build module.
The solution is to create submodules, each with the relevant systemloader enabled, as sketched below.
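A minimal sketch of that layout (the module and directory names here are hypothetical):
// one packaging submodule per init system, both packaging the same application
lazy val appSystemd = (project in file("app-systemd"))
  .enablePlugins(JavaServerAppPackaging, JDebPackaging, SystemdPlugin)
lazy val appUpstart = (project in file("app-upstart"))
  .enablePlugins(JavaServerAppPackaging, JDebPackaging, UpstartPlugin)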
Logging to stderr
You are right regarding systemd: it provides facilities to capture the log output of your process. If you like, you can add your findings to the native-packager documentation (there is a systemd plugin section).
The upstart support in native-packager is rather simple. There weren't a lot of requests, as Ubuntu is switching to systemd, and you can always fall back to SystemV, which brings me to the solution to your problem.
You can use the SystemVPlugin, which supports a daemon_log_file. The systemv documentation provides you with the necessary details.
cheers,
Muki

KafkaUtils class not found in Spark streaming

I have just begun with Spark Streaming, and I am trying to build a sample application that counts words from a Kafka stream. Although it compiles with sbt package, when I run it I get a NoClassDefFoundError. This post seems to have the same problem, but the solution is for Maven and I have not been able to reproduce it with sbt.
KafkaApp.scala:
import org.apache.spark._
import org.apache.spark.streaming._
import org.apache.spark.streaming.kafka._
object KafkaApp {
def main(args: Array[String]) {
val conf = new SparkConf().setAppName("kafkaApp").setMaster("local[*]")
val ssc = new StreamingContext(conf, Seconds(1))
val kafkaParams = Map(
"zookeeper.connect" -> "localhost:2181",
"zookeeper.connection.timeout.ms" -> "10000",
"group.id" -> "sparkGroup"
)
val topics = Map(
"test" -> 1
)
// stream of (topic, ImpressionLog)
val messages = KafkaUtils.createStream(ssc, kafkaParams, topics, storage.StorageLevel.MEMORY_AND_DISK)
println(s"Number of words: %{messages.count()}")
}
}
build.sbt:
name := "Simple Project"
version := "1.1"
scalaVersion := "2.10.4"
libraryDependencies ++= Seq(
"org.apache.spark" %% "spark-core" % "1.1.1",
"org.apache.spark" %% "spark-streaming" % "1.1.1",
"org.apache.spark" %% "spark-streaming-kafka" % "1.1.1"
)
resolvers += "Akka Repository" at "http://repo.akka.io/releases/"
And I submit it with:
bin/spark-submit \
--class "KafkaApp" \
--master local[4] \
target/scala-2.10/simple-project_2.10-1.1.jar
Error:
14/12/30 19:44:57 INFO AkkaUtils: Connecting to HeartbeatReceiver: akka.tcp://sparkDriver@192.168.5.252:65077/user/HeartbeatReceiver
Exception in thread "main" java.lang.NoClassDefFoundError: org/apache/spark/streaming/kafka/KafkaUtils$
at KafkaApp$.main(KafkaApp.scala:28)
at KafkaApp.main(KafkaApp.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at org.apache.spark.deploy.SparkSubmit$.launch(SparkSubmit.scala:329)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:75)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Caused by: java.lang.ClassNotFoundException: org.apache.spark.streaming.kafka.KafkaUtils$
at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
spark-submit does not automatically include the package containing KafkaUtils. You need to have it in your project JAR. For that, you need to create an all-inclusive uber-JAR using sbt assembly. Here is an example build.sbt:
https://github.com/tdas/spark-streaming-external-projects/blob/master/kafka/build.sbt
You obviously also need to add the assembly plugin to SBT.
https://github.com/tdas/spark-streaming-external-projects/tree/master/kafka/project
Please try including all dependency JARs while submitting the application:
./spark-submit --name "SampleApp" --deploy-mode client --master spark://host:7077 --class com.stackexchange.SampleApp --jars $SPARK_INSTALL_DIR/spark-streaming-kafka_2.10-1.3.0.jar,$KAFKA_INSTALL_DIR/libs/kafka_2.10-0.8.2.0.jar,$KAFKA_INSTALL_DIR/libs/metrics-core-2.2.0.jar,$KAFKA_INSTALL_DIR/libs/zkclient-0.3.jar spark-example-1.0-SNAPSHOT.jar
The following build.sbt worked for me. It also requires you to put the sbt-assembly plugin in a file under the project/ directory.
build.sbt
name := "NetworkStreaming" // https://github.com/sbt/sbt-assembly/blob/master/Migration.md#upgrading-with-bare-buildsbt
libraryDependencies ++= Seq(
"org.apache.spark" % "spark-streaming_2.10" % "1.4.1",
"org.apache.spark" % "spark-streaming-kafka_2.10" % "1.4.1", // kafka
"org.apache.hbase" % "hbase" % "0.92.1",
"org.apache.hadoop" % "hadoop-core" % "1.0.2",
"org.apache.spark" % "spark-mllib_2.10" % "1.3.0"
)
mergeStrategy in assembly := {
case m if m.toLowerCase.endsWith("manifest.mf") => MergeStrategy.discard
case m if m.toLowerCase.matches("meta-inf.*\\.sf$") => MergeStrategy.discard
case "log4j.properties" => MergeStrategy.discard
case m if m.toLowerCase.startsWith("meta-inf/services/") => MergeStrategy.filterDistinctLines
case "reference.conf" => MergeStrategy.concat
case _ => MergeStrategy.first
}
project/plugins.sbt
addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.1")
I met the same problem, and I solved it by building the JAR with its dependencies.
Add the code below to pom.xml:
<build>
<sourceDirectory>src/main/java</sourceDirectory>
<testSourceDirectory>src/test/java</testSourceDirectory>
<plugins>
<!--
Bind the maven-assembly-plugin to the package phase
this will create a jar file without the storm dependencies
suitable for deployment to a cluster.
-->
<plugin>
<artifactId>maven-assembly-plugin</artifactId>
<configuration>
<descriptorRefs>
<descriptorRef>jar-with-dependencies</descriptorRef>
</descriptorRefs>
<archive>
<manifest>
<mainClass></mainClass>
</manifest>
</archive>
</configuration>
<executions>
<execution>
<id>make-assembly</id>
<phase>package</phase>
<goals>
<goal>single</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
Then run mvn package and submit the generated "example-jar-with-dependencies.jar".
I added the dependency externally: Project --> Properties --> Java Build Path --> Libraries --> Add External JARs, and added the required JAR.
This solved my issue.
Using Spark 1.6 did the job for me, without the hassle of handling so many external JARs... it can get quite complicated to manage.
You could also download the JAR file and put it in the Spark lib folder, because it is not installed with Spark, instead of beating your head trying to get sbt and build.sbt to work:
http://central.maven.org/maven2/org/apache/spark/spark-streaming-kafka-0-10_2.10/2.1.1/spark-streaming-kafka-0-10_2.10-2.1.1.jar
copy it to:
/usr/local/spark/spark-2.1.0-bin-hadoop2.6/jars/
use --packages argument on spark-submit, it takes mvn package in the format group:artifact:version,...
import org.apache.spark.streaming.kafka.KafkaUtils
and use the following in build.sbt:
name := "kafka"
version := "0.1"
scalaVersion := "2.11.12"
retrieveManaged := true
fork := true
//libraryDependencies += "org.apache.spark" % "spark-streaming_2.11" % "2.2.0"
//libraryDependencies += "org.apache.spark" % "spark-streaming-kafka-0-8_2.11" % "2.1.0"
libraryDependencies += "org.apache.spark" %% "spark-core" % "2.2.0"
//libraryDependencies += "org.apache.spark" %% "spark-sql" % "2.2.0"
libraryDependencies += "org.apache.spark" %% "spark-streaming" % "2.2.0"
// https://mvnrepository.com/artifact/org.apache.spark/spark-streaming-kafka-0-8
libraryDependencies += "org.apache.spark" %% "spark-streaming-kafka-0-8" % "2.2.0" % "provided"
// https://mvnrepository.com/artifact/org.apache.spark/spark-streaming-kafka-0-8-assembly
libraryDependencies += "org.apache.spark" %% "spark-streaming-kafka-0-8-assembly" % "2.2.0"
This fixed the issue for me.

sbt: "impossible to get artifacts when data has not been loaded. IvyNode = org.antlr#stringtemplate;3.2.1"

This looks like an Ivy problem surfacing via sbt (0.11.2) when I invoke the sbt-eclipse plugin:
> eclipse with-source=true
...
[info] Resolving com.googlecode.linkedin-j#linkedin-j-core;1.0.416 ...
[info] Resolving oauth.signpost#signpost-core;1.2.1.1 ...
[info] Resolving net.sf.kxml#kxml2;2.3.0 ...
[info] Resolving commons-cli#commons-cli;1.2 ...
[info] Resolving javax.servlet#servlet-api;2.5 ...
[error] impossible to get artifacts when data has not been loaded. IvyNode = org.antlr#stringtemplate;3.2.1
[error] {file:/home/yang/pod/sales/scala/}pod/*:update-classifiers: java.lang.IllegalStateException: impossible to get artifacts when data has not been loaded. IvyNode = org.antlr#stringtemplate;3.2.1
[info] Resolving org.scala-lang#scala-library;2.9.1 ...
[info] Resolving com.google.protobuf#protobuf-java;2.4.1 ...
[info] Resolving org.scalaquery#scalaquery_2.9.0;0.9.4 ...
[info] Resolving postgresql#postgresql;9.0-801.jdbc4 ...
...
[info] Resolving oauth.signpost#signpost-core;1.2.1.1 ...
[info] Resolving net.sf.kxml#kxml2;2.3.0 ...
[info] Resolving commons-cli#commons-cli;1.2 ...
[info] Resolving javax.servlet#servlet-api;2.5 ...
[error] impossible to get artifacts when data has not been loaded. IvyNode = org.antlr#stringtemplate;3.2.1
[error] {file:/home/yang/pod/sales/scala/}pod/*:update-classifiers: java.lang.IllegalStateException: impossible to get artifacts when data has not been loaded. IvyNode = org.antlr#stringtemplate;3.2.1
[error] Could not create Eclipse project files: Error evaluating task 'update-classifiers': error, Error evaluating task 'update-classifiers': error
I tried blowing away my ~/.{m2,ivy,sbt} directories, to no avail. Relevant extracts from my build.sbt:
...
scalaVersion := "2.9.1"
seq(coffeeSettings: _*)
seq(webSettings: _*)
seq(sbtprotobuf.ProtobufPlugin.protobufSettings: _*)
seq(Revolver.settings: _*)
libraryDependencies ++= Seq(
"org.scalaquery" % "scalaquery_2.9.0" % "0.9.4",
"postgresql" % "postgresql" % "9.0-801.jdbc4", // % "runtime",
"com.jolbox" % "bonecp" % "0.7.1.RELEASE",
"ru.circumflex" % "circumflex-orm" % "2.1-SNAPSHOT",
"ru.circumflex" % "circumflex-core" % "2.1-SNAPSHOT",
"net.sf.ehcache" % "ehcache-core" % "2.4.3",
// snapshots needed for scala 2.9.0 support
"org.scalatra" %% "scalatra" % "2.1.0-SNAPSHOT",
"org.scalatra" %% "scalatra-scalate" % "2.1.0-SNAPSHOT",
"org.scalatra" %% "scalatra-fileupload" % "2.1.0-SNAPSHOT",
"org.fusesource.scalate" % "scalate-jruby" % "1.5.0",
"org.fusesource.scalamd" % "scalamd" % "1.5", // % runtime,
"org.mortbay.jetty" % "jetty" % "6.1.22",
"net.debasishg" % "sjson_2.9.0" % "0.12",
"com.lambdaworks" % "scrypt" % "1.2.0",
"org.mortbay.jetty" % "jetty" % "6.1.22" % "container",
// "org.bowlerframework" %% "core" % "0.4.1",
"net.sf.opencsv" % "opencsv" % "2.1",
"org.apache.commons" % "commons-math" % "2.2",
"org.apache.commons" % "commons-lang3" % "3.0",
"com.google.protobuf" % "protobuf-java" % "2.4.1",
"ch.qos.logback" % "logback-classic" % "0.9.29",
"org.scalatest" % "scalatest_2.9.0" % "1.6.1",
"com.h2database" % "h2" % "1.3.158",
"pentaho.weka" % "pdm-3.7-ce" % "SNAPSHOT",
// this line doesn't work due to sbt bug:
// https://github.com/harrah/xsbt/issues/263
// work around by manually downloading this into the lib/ directory
// "org.rosuda" % "jri" % "0.9-1" from "https://dev.partyondata.com/deps/jri-0.9-1.jar",
"net.java.dev.jna" % "jna" % "3.3.0",
"org.scalala" % "scalala_2.9.0" % "1.0.0.RC2-SNAPSHOT",
"com.joestelmach" % "natty" % "0.5",
"rhino" % "js" % "1.7R2",
"junit" % "junit" % "4.9",
"org.apache.commons" % "commons-email" % "1.2",
"commons-validator" % "commons-validator" % "1.3.1",
"oro" % "oro" % "2.0.8", // validator depends on this
"org.scala-tools.time" %% "time" % "0.5",
"com.carrotsearch" % "hppc" % "0.4.1",
// "com.twitter" %% "util" % "1.12.12",
"com.yammer.metrics" % "metrics-core" % "2.0.0-RC0",
"org.clapper" %% "grizzled-scala" % "1.0.9",
"com.googlecode.linkedin-j" % "linkedin-j-core" % "1.0.416",
"javax.servlet" % "servlet-api" % "2.5" % "provided->default"
)
fork in run := true
mainClass in Revolver.reStart := Some("com.partyondata.Web")
javaOptions ++= (
Seq(
"-Dcom.sun.management.jmxremote",
"-Dcom.sun.management.jmxremote.port=3030",
"-Dcom.sun.management.jmxremote.authenticate=false",
"-Dcom.sun.management.jmxremote.ssl=false",
"-Xmx3G",
"-Djava.library.path=" + System.getenv("HOME") +
"/R/x86_64-pc-linux-gnu-library/2.13/rJava/jri:" +
"/usr/lib/R/site-library/rJava/jri"
)
)
javaOptions in Revolver.reStart <++= javaOptions
javaOptions ++= (System.getenv("JREBEL_PATH") match {
case null => Seq()
case path => Seq("-javaagent:" + path)
})
scalacOptions ++= Seq("-g:vars", "-deprecation", "-unchecked")
// needed for the scalatra snapshots
resolvers ++= Seq(
"Twitter" at "http://maven.twttr.com/",
"Scala-Tools Snapshots" at "http://scala-tools.org/repo-snapshots/",
"Sonatype OSS Snapshots" at "http://oss.sonatype.org/content/repositories/snapshots/",
...
)
initialCommands in consoleQuick := """
import scalala.scalar._;
import scalala.tensor.::;
import scalala.tensor.mutable._;
import scalala.tensor.dense._;
import scalala.tensor.sparse._;
import scalala.library.Library._;
import scalala.library.LinearAlgebra._;
import scalala.library.Statistics._;
import scalala.library.Plotting._;
import scalala.operators.Implicits._;
//
import scala.collection.{mutable => mut}
import scala.collection.JavaConversions._
import ru.circumflex.orm._
import ru.circumflex.core._
"""
And my plugins.sbt:
//
// xsbt-web-plugin
//
libraryDependencies <+= sbtVersion(v => v match {
case "0.11.0" => "com.github.siasia" %% "xsbt-web-plugin" % "0.11.0-0.2.8"
case "0.11.1" => "com.github.siasia" %% "xsbt-web-plugin" % "0.11.1-0.2.10"
case "0.11.2" => "com.github.siasia" %% "xsbt-web-plugin" % "0.11.2-0.2.11"
})
//
// sbteclipse
//
addSbtPlugin("com.typesafe.sbteclipse" % "sbteclipse-plugin" % "2.0.0")
//
// sbt-protobuf
//
resolvers += "gseitz#github" at "http://gseitz.github.com/maven/"
addSbtPlugin("com.github.gseitz" % "sbt-protobuf" % "0.2.2")
//
// coffeescripted-sbt: this doesn't work
//
resolvers += "less is" at "http://repo.lessis.me"
addSbtPlugin("me.lessis" % "coffeescripted-sbt" % "0.2.0")
//
// sbt-resolver
//
resolvers += "spray repo" at "http://repo.spray.cc"
addSbtPlugin("cc.spray" % "sbt-revolver" % "0.6.0")
This problem only occurs if I include the dependency:
"com.joestelmach" % "natty" % "0.5"
Any ideas on how to work around this issue? Thanks in advance.
I had a similar issue with a dependency of camel-test-spring. I fixed it by marking it intransitive:
libraryDependencies += "org.apache.camel" % "camel-test-spring" % "2.10.1" % "test" intransitive()
You could try adding this line to your .sbt file:
dependencyOverrides += "org.apache.camel" % "camel-test-spring" % "2.10.1" % "test"
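Adapted to the artifact named in this question's error, that would be (again a hypothetical adaptation; I have not verified that dependencyOverrides is supported on sbt 0.11.2):
dependencyOverrides += "org.antlr" % "stringtemplate" % "3.2.1"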
