线程“main”中出现异常：java.lang.NoSuchMethodError: scala.Product.$init$(Lscala/Product;)V

rseugnpd  于 2021-05-27  发布在  Spark
关注(0)|答案(0)|浏览(420)

我正在尝试用 IntelliJ IDEA 运行代码，但出现以下错误：“Exception in thread "main" java.lang.NoSuchMethodError: scala.Product.$init$(Lscala/Product;)V”。请帮我找出问题所在。我的 Scala 版本是 2.11.12，Spark 版本是 2.4.4。

Metorikku$: Starting Metorikku - Parsing configuration
    ConfigurationParser$: Starting Metorikku - Parsing configuration
    Exception in thread "main" java.lang.NoSuchMethodError:scala.Product.$init$(Lscala/Product;)V
    at org.apache.spark.SparkConf$DeprecatedConfig.<init>(SparkConf.scala:810)
    at org.apache.spark.SparkConf$.<init>(SparkConf.scala:644)
    at org.apache.spark.SparkConf$.<clinit>(SparkConf.scala)
    at org.apache.spark.SparkConf.set(SparkConf.scala:95)
    at org.apache.spark.SparkConf.$anonfun$loadFromSystemProperties$3(SparkConf.scala:77)
    at scala.collection.TraversableLike$WithFilter$$anonfun$foreach$1.apply(TraversableLike.scala:733)
    at scala.collection.immutable.HashMap$HashMap1.foreach(HashMap.scala:221)
    at scala.collection.immutable.HashMap$HashTrieMap.foreach(HashMap.scala:428)
    at scala.collection.immutable.HashMap$HashTrieMap.foreach(HashMap.scala:428)
    at scala.collection.TraversableLike$WithFilter.foreach(TraversableLike.scala:732)
    at org.apache.spark.SparkConf.loadFromSystemProperties(SparkConf.scala:76)
    at org.apache.spark.SparkConf.<init>(SparkConf.scala:71)
    at org.apache.spark.SparkConf.<init>(SparkConf.scala:58)
    at org.apache.spark.sql.SparkSession$Builder.$anonfun$getOrCreate$5(SparkSession.scala:927)
    at scala.Option.getOrElse(Option.scala:121)
    at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:926)
    at com.yotpo.metorikku.utils.FileUtils$.getHadoopPath(FileUtils.scala:63)
    at com.yotpo.metorikku.utils.FileUtils$.readFileWithHadoop(FileUtils.scala:72)
    at com.yotpo.metorikku.utils.FileUtils$.readConfigurationFile(FileUtils.scala:56)
    at com.yotpo.metorikku.configuration.job.ConfigurationParser$.parse(ConfigurationParser.scala:34)
    at com.yotpo.metorikku.Metorikku$.delayedEndpoint$com$yotpo$metorikku$Metorikku$1(Metorikku.scala:12)
    at com.yotpo.metorikku.Metorikku$delayedInit$body.apply(Metorikku.scala:9)
    at scala.Function0$class.apply$mcV$sp(Function0.scala:34)
    at scala.runtime.AbstractFunction0.apply$mcV$sp(AbstractFunction0.scala:12)
    at scala.App$$anonfun$main$1.apply(App.scala:76)
    at scala.App$$anonfun$main$1.apply(App.scala:76)
    at scala.collection.immutable.List.foreach(List.scala:392)
    at scala.collection.generic.TraversableForwarder$class.foreach(TraversableForwarder.scala:35)
    at scala.App$class.main(App.scala:76)
    at com.yotpo.metorikku.Metorikku$.main(Metorikku.scala:9)
    at com.yotpo.metorikku.Metorikku.main(Metorikku.scala)

build.sbt文件

// build.sbt fragment for a Spark 2.4 / Scala 2.11 project.
// NOTE(review): a NoSuchMethodError on scala.Product.$init$ almost always means a
// Scala 2.12-built artifact is on the classpath of a 2.11 build (or Spark itself was
// launched with a 2.12 distribution) — verify with `sbt dependencyTree` and make sure
// every `_2.12` artifact is excluded or replaced by its `_2.11` counterpart.
scalaVersion := "2.11.12"

// Spark version is overridable from the command line: sbt -DsparkVersion=2.4.4 ...
val sparkVersion = Option(System.getProperty("sparkVersion")).getOrElse("2.4.5")
val jacksonVersion = "2.9.9"

// Exclusion rules used below to keep conflicting transitive dependencies off the classpath.
lazy val excludeJpountz = ExclusionRule(organization = "net.jpountz.lz4", name = "lz4")
lazy val excludeNetty = ExclusionRule(organization = "io.netty", name = "netty")
lazy val excludeNettyAll = ExclusionRule(organization = "io.netty", name = "netty-all")
lazy val excludeAvro = ExclusionRule(organization = "org.apache.avro", name = "avro")
lazy val excludeSpark = ExclusionRule(organization = "org.apache.spark")
lazy val excludeFasterXML = ExclusionRule(organization = "com.fasterxml.jackson.module", name = "jackson-module-scala_2.12")
lazy val excludeMetricsCore = ExclusionRule(organization = "io.dropwizard.metrics", name = "metrics-core")
lazy val excludeLog4j = ExclusionRule(organization = "org.apache.logging.log4j")
lazy val excludeParquet = ExclusionRule(organization = "org.apache.parquet")

libraryDependencies ++= Seq(
  "org.apache.spark" %% "spark-core" % sparkVersion % "provided",
  "org.apache.spark" %% "spark-sql" % sparkVersion % "provided",
  "org.apache.spark" %% "spark-mllib" % sparkVersion % "provided",
  "org.apache.spark" %% "spark-hive" % sparkVersion % "provided",
  "org.apache.spark" %% "spark-sql-kafka-0-10" % sparkVersion % "provided" excludeAll excludeJpountz,
  "org.apache.spark" %% "spark-streaming" % sparkVersion % "provided",
  "org.apache.spark" %% "spark-avro" % sparkVersion % "provided",
  "com.datastax.spark" %% "spark-cassandra-connector" % "2.4.2",
  "com.holdenkarau" %% "spark-testing-base" % "2.4.3_0.12.0" % "test",
  "com.github.scopt" %% "scopt" % "3.6.0",
  "RedisLabs" % "spark-redis" % "0.3.2",
  "org.json4s" %% "json4s-native" % "3.5.2",
  "io.netty" % "netty-all" % "4.1.32.Final",
  "io.netty" % "netty" % "3.10.6.Final",
  "com.google.guava" % "guava" % "16.0.1",
  "com.typesafe.play" %% "play-json" % "2.6.2",
  "com.databricks" %% "spark-redshift" % "3.0.0-preview1" excludeAll excludeAvro,
  "com.amazon.redshift" % "redshift-jdbc42" % "1.2.1.1001",
  "com.segment.analytics.java" % "analytics" % "2.0.0",
  "org.scala-lang.modules" %% "scala-parser-combinators" % "1.0.6",
  "org.scala-lang" % "scala-compiler" % "2.11.12",
  "com.fasterxml.jackson.module" %% "jackson-module-scala" % jacksonVersion,
  "com.fasterxml.jackson.dataformat" % "jackson-dataformat-cbor" % jacksonVersion,
  "com.fasterxml.jackson.core" % "jackson-core" % jacksonVersion,
  "com.fasterxml.jackson.core" % "jackson-annotations" % jacksonVersion,
  "com.fasterxml.jackson.core" % "jackson-databind" % jacksonVersion,
  "com.fasterxml.jackson.dataformat" % "jackson-dataformat-yaml" % jacksonVersion,
  "com.groupon.dse" % "spark-metrics" % "2.0.0" excludeAll excludeMetricsCore,
  "org.apache.commons" % "commons-text" % "1.6",
  "org.influxdb" % "influxdb-java" % "2.14",
  "org.apache.kafka" %% "kafka" % "2.2.0" % "provided",
  "za.co.absa" % "abris_2.11" % "3.1.1" % "provided" excludeAll(excludeAvro, excludeSpark),
  // FIX: the original line was missing the `%` separator before the "provided"
  // configuration, which is a syntax error in the sbt dependency DSL.
  "org.apache.hudi" %% "hudi-spark-bundle" % "0.5.2-incubating" % "provided" excludeAll excludeFasterXML,
  "org.apache.parquet" % "parquet-avro" % "1.10.1" % "provided",
  "org.apache.avro" % "avro" % "1.8.2" % "provided",
  "org.apache.hive" % "hive-jdbc" % "2.3.3" % "provided" excludeAll(excludeNetty, excludeNettyAll, excludeLog4j, excludeParquet),
  // FIX: restored from mojibake (full-width quotes, "hadoop aws", Chinese "提供"
  // in place of the literal "provided" configuration string).
  "org.apache.hadoop" % "hadoop-aws" % "2.7.3" % "provided"
)

暂无答案!

目前还没有任何答案,快来回答吧!

相关问题