When I run the following in a terminal:
sudo spark-submit --master local --class xxx.xxxx.xxx.xxxx.xxxxxxxxxxxxJob --conf 'spark.driver.extraJavaOptions=-Dconfig.resource=xxx.conf' /home/xxxxx/workspace/prueba/pruebas/target/scala-2.11/MiPrueba.jar
I get the following error:
Exception in thread "main" java.lang.NoSuchMethodError: scala.Predef$.refArrayOps([Ljava/lang/Object;)Lscala/collection/mutable/ArrayOps;
    at pureconfig.DurationUtils$.words(DurationUtils.scala:36)
    at pureconfig.DurationUtils$.pureconfig$DurationUtils$$expandLabels(DurationUtils.scala:38)
    at pureconfig.DurationUtils$$anonfun$2.apply(DurationUtils.scala:53)
    at pureconfig.DurationUtils$$anonfun$2.apply(DurationUtils.scala:53)
    at scala.collection.immutable.List.flatMap(List.scala:338)
    at pureconfig.DurationUtils$.<init>(DurationUtils.scala:53)
    at pureconfig.DurationUtils$.<clinit>(DurationUtils.scala)
    at pureconfig.DurationReaders$class.$init$(BasicReaders.scala:114)
    at pureconfig.ConfigReader$.<init>(ConfigReader.scala:121)
    at pureconfig.ConfigReader$.<clinit>(ConfigReader.scala)
    at xxx.xxxx.xxx.xxxx.config.package$Config$.load(package.scala:67)
    at xxx.xxxx.xxx.xxxx.job.xxxxJob$class.main(XXXxxx.scala:23)
    at xxx.xxxx.xxx.xxxx......Job$.main(Xxxxxxxxxxxx.scala:19)
    at xxx.xxxx.xxx.xxxx..main(XXXXXXxxxxxxxx.scala)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
    at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:849)
    at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:167)
    at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:195)
    at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:86)
    at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:924)
    at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:933)
    at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
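For context, package.scala:67 in the trace is the first read of the configuration; the call site presumably looks something like the sketch below (AppConfig and its fields are assumptions, since the real class is not shown). What actually fails is the static initialization of pureconfig's ConfigReader companion, which runs the DurationUtils initializer in the top frames:

import pureconfig._  // pureconfig 0.9.x: automatic derivation lives in the core package

// Hypothetical config class standing in for whatever package.scala defines.
case class AppConfig(name: String, timeout: scala.concurrent.duration.FiniteDuration)

object Config {
  // loadConfig[A] needs an implicit ConfigReader[A]; touching ConfigReader$
  // triggers DurationUtils' static initializer, the frame that fails above.
  def load(): AppConfig =
    loadConfig[AppConfig].fold(err => sys.error(err.toString), identity)
}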
Build definitions:
version := "0.1"
scalaVersion := "2.11.11"
libraryDependencies:
val dependFullList = spark ++ hadoop ++ apisDownload ++ configuration
configuration:
val configuration = Seq(
"com.github.pureconfig" %% "pureconfig" % "0.9.2",
"com.typesafe" % "config" % "1.3.1",
"org.lz4" % "lz4-java" % "1.4.1"
)
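One note on this list: pureconfig 0.9.2 is cross-published for Scala 2.11 and 2.12, so these coordinates alone do not pin which binary ends up on the classpath. As a diagnostic guard (a sketch, not a fix; dependencyOverrides is a standard sbt setting), the build can be forced onto a single scala-library:

// Sketch: keep every module on the declared Scala version so an evicted
// 2.12 artifact cannot sneak in; run `evicted` in the sbt shell to inspect.
dependencyOverrides += "org.scala-lang" % "scala-library" % scalaVersion.value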
spark:
val spark = Seq(
"org.apache.spark" %% "spark-core" % Versions.spark % "provided" exclude("javax.jms", "jms"),
"org.apache.spark" %% "spark-sql" % Versions.spark % "provided",
"com.databricks" %% "spark-xml" % "0.4.1"
// https://mvnrepository.com/artifact/mrpowers/spark-daria
)
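Because spark-core and spark-sql are "provided", the Scala runtime at submit time comes from the installed Spark distribution, not from this build, so the jar's 2.11 target and Spark's Scala can disagree; a NoSuchMethodError on refArrayOps is the classic symptom of 2.11-compiled code running against a 2.12 scala-library. A minimal probe, with a hypothetical object name, that prints the Scala version spark-submit actually runs against:

object VersionProbe {
  // scala.util.Properties.versionString reports the scala-library on the
  // driver classpath, e.g. "version 2.11.12" or "version 2.12.8".
  def main(args: Array[String]): Unit =
    println(scala.util.Properties.versionString)
}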
Any ideas?
What does spark-submit --version report? – Krzysztof Atłasik