I have some Spark code that creates a DataFrame from a given JSON response of an API. Using a recursive algorithm, the code also creates DataFrames from the nested JSON objects and arrays of this base response. There are two situations in which I get an org.apache.spark.sql.AnalysisException, but for different reasons:

1. when calling spark.sql(s"SELECT * FROM $viewName") where viewName has not yet been created via createOrReplaceTempView (see Exception 1);
2. when trying to explode an empty JSON array such as {"RELATED": []} (see Exception 2; a minimal reproduction follows the try/catch block below).

I currently catch both with the following try/catch block:
try {
  // val existing = spark.sql(s"SELECT * FROM $viewName")
  // ... some more Spark code ...
  // val temp = df.select(explode($"${arr.name}").as(arr.name)).select(s"${arr.name}.*")
} catch {
  case e: AnalysisException =>
    e.printStackTrace()
}
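For case 2, here is a minimal, standalone reproduction (a sketch, not my actual parser; the RELATED field name is taken from the example above). Spark's JSON reader infers an empty array as array<string> rather than an array of structs, so the trailing star expansion fails:

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.explode

val spark = SparkSession.builder().master("local[*]").getOrCreate()
import spark.implicits._

// The empty array is inferred as array<string>, not array<struct<...>>.
val df = spark.read.json(Seq("""{"RELATED": []}""").toDS)

// explode() therefore yields a string column, so "RELATED.*" cannot be
// star-expanded and analysis fails with
// "Can only star expand struct data types".
df.select(explode($"RELATED").as("RELATED")).select("RELATED.*")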
Exception 1:
org.apache.spark.sql.AnalysisException: Table or view not found: SET_TYPE_ID; line 1 pos 14
at org.apache.spark.sql.catalyst.analysis.package$AnalysisErrorAt.failAnalysis(package.scala:47)
at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$.org$apache$spark$sql$catalyst$analysis$Analyzer$ResolveRelations$$lookupTableFromCatalog(Analyzer.scala:731)
at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$.resolveRelation(Analyzer.scala:683)
at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$$anonfun$apply$8.applyOrElse(Analyzer.scala:713)
at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$$anonfun$apply$8.applyOrElse(Analyzer.scala:706)
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$$anonfun$resolveOperatorsUp$1$$anonfun$apply$1.apply(AnalysisHelper.scala:90)
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$$anonfun$resolveOperatorsUp$1$$anonfun$apply$1.apply(AnalysisHelper.scala:90)
at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:70)
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$$anonfun$resolveOperatorsUp$1.apply(AnalysisHelper.scala:89)
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$$anonfun$resolveOperatorsUp$1.apply(AnalysisHelper.scala:86)
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$.allowInvokingTransformsInAnalyzer(AnalysisHelper.scala:194)
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$class.resolveOperatorsUp(AnalysisHelper.scala:86)
at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.resolveOperatorsUp(LogicalPlan.scala:29)
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$$anonfun$resolveOperatorsUp$1$$anonfun$1.apply(AnalysisHelper.scala:87)
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$$anonfun$resolveOperatorsUp$1$$anonfun$1.apply(AnalysisHelper.scala:87)
at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$4.apply(TreeNode.scala:329)
at org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:187)
at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:327)
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$$anonfun$resolveOperatorsUp$1.apply(AnalysisHelper.scala:87)
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$$anonfun$resolveOperatorsUp$1.apply(AnalysisHelper.scala:86)
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$.allowInvokingTransformsInAnalyzer(AnalysisHelper.scala:194)
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$class.resolveOperatorsUp(AnalysisHelper.scala:86)
at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.resolveOperatorsUp(LogicalPlan.scala:29)
at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$.apply(Analyzer.scala:706)
at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$.apply(Analyzer.scala:652)
at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1$$anonfun$apply$1.apply(RuleExecutor.scala:87)
at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1$$anonfun$apply$1.apply(RuleExecutor.scala:84)
at scala.collection.LinearSeqOptimized$class.foldLeft(LinearSeqOptimized.scala:124)
at scala.collection.immutable.List.foldLeft(List.scala:84)
at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1.apply(RuleExecutor.scala:84)
at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1.apply(RuleExecutor.scala:76)
at scala.collection.immutable.List.foreach(List.scala:392)
at org.apache.spark.sql.catalyst.rules.RuleExecutor.execute(RuleExecutor.scala:76)
at org.apache.spark.sql.catalyst.analysis.Analyzer.org$apache$spark$sql$catalyst$analysis$Analyzer$$executeSameContext(Analyzer.scala:127)
at org.apache.spark.sql.catalyst.analysis.Analyzer.execute(Analyzer.scala:121)
at org.apache.spark.sql.catalyst.analysis.Analyzer$$anonfun$executeAndCheck$1.apply(Analyzer.scala:106)
at org.apache.spark.sql.catalyst.analysis.Analyzer$$anonfun$executeAndCheck$1.apply(Analyzer.scala:105)
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$.markInAnalyzer(AnalysisHelper.scala:201)
at org.apache.spark.sql.catalyst.analysis.Analyzer.executeAndCheck(Analyzer.scala:105)
at org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:57)
at org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:55)
at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:47)
at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:78)
at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:642)
at JsonParser$.recurse(JsonParser.scala:31)
at JsonParser$$anonfun$recurse$1.apply(JsonParser.scala:54)
at JsonParser$$anonfun$recurse$1.apply(JsonParser.scala:48)
at scala.collection.immutable.List.foreach(List.scala:392)
at JsonParser$.recurse(JsonParser.scala:48)
at JsonParser$$anonfun$recurse$1.apply(JsonParser.scala:52)
at JsonParser$$anonfun$recurse$1.apply(JsonParser.scala:48)
at scala.collection.immutable.List.foreach(List.scala:392)
at JsonParser$.recurse(JsonParser.scala:48)
at JsonParser$$anonfun$recurse$2.apply(JsonParser.scala:63)
at JsonParser$$anonfun$recurse$2.apply(JsonParser.scala:57)
at scala.collection.immutable.List.foreach(List.scala:392)
at JsonParser$.recurse(JsonParser.scala:57)
at JsonParser$$anonfun$recurse$1.apply(JsonParser.scala:54)
at JsonParser$$anonfun$recurse$1.apply(JsonParser.scala:48)
at scala.collection.immutable.List.foreach(List.scala:392)
at JsonParser$.recurse(JsonParser.scala:48)
at JsonParser$$anonfun$recurse$1.apply(JsonParser.scala:52)
at JsonParser$$anonfun$recurse$1.apply(JsonParser.scala:48)
at scala.collection.immutable.List.foreach(List.scala:392)
at JsonParser$.recurse(JsonParser.scala:48)
at JsonParser$$anonfun$recurse$2.apply(JsonParser.scala:63)
at JsonParser$$anonfun$recurse$2.apply(JsonParser.scala:57)
at scala.collection.immutable.List.foreach(List.scala:392)
at JsonParser$.recurse(JsonParser.scala:57)
at JsonParser$$anonfun$recurse$1.apply(JsonParser.scala:54)
at JsonParser$$anonfun$recurse$1.apply(JsonParser.scala:48)
at scala.collection.immutable.List.foreach(List.scala:392)
at JsonParser$.recurse(JsonParser.scala:48)
at JsonParser$$anonfun$recurse$1.apply(JsonParser.scala:52)
at JsonParser$$anonfun$recurse$1.apply(JsonParser.scala:48)
at scala.collection.immutable.List.foreach(List.scala:392)
at JsonParser$.recurse(JsonParser.scala:48)
at JsonParser$$anonfun$recurse$2.apply(JsonParser.scala:63)
at JsonParser$$anonfun$recurse$2.apply(JsonParser.scala:57)
at scala.collection.immutable.List.foreach(List.scala:392)
at JsonParser$.recurse(JsonParser.scala:57)
at JsonParser$.delayedEndpoint$JsonParser$1(JsonParser.scala:24)
at JsonParser$delayedInit$body.apply(JsonParser.scala:8)
at scala.Function0$class.apply$mcV$sp(Function0.scala:34)
at scala.runtime.AbstractFunction0.apply$mcV$sp(AbstractFunction0.scala:12)
at scala.App$$anonfun$main$1.apply(App.scala:76)
at scala.App$$anonfun$main$1.apply(App.scala:76)
at scala.collection.immutable.List.foreach(List.scala:392)
at scala.collection.generic.TraversableForwarder$class.foreach(TraversableForwarder.scala:35)
at scala.App$class.main(App.scala:76)
at JsonParser$.main(JsonParser.scala:8)
at JsonParser.main(JsonParser.scala)
Caused by: org.apache.spark.sql.catalyst.analysis.NoSuchTableException: Table or view 'set_type_id' not found in database 'default';
at org.apache.spark.sql.catalyst.catalog.ExternalCatalog$class.requireTableExists(ExternalCatalog.scala:48)
at org.apache.spark.sql.catalyst.catalog.InMemoryCatalog.requireTableExists(InMemoryCatalog.scala:45)
at org.apache.spark.sql.catalyst.catalog.InMemoryCatalog.getTable(InMemoryCatalog.scala:326)
at org.apache.spark.sql.catalyst.catalog.ExternalCatalogWithListener.getTable(ExternalCatalogWithListener.scala:138)
at org.apache.spark.sql.catalyst.catalog.SessionCatalog.lookupRelation(SessionCatalog.scala:706)
at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$.org$apache$spark$sql$catalyst$analysis$Analyzer$ResolveRelations$$lookupTableFromCatalog(Analyzer.scala:728)
... 90 more
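(For case 1 I know I could avoid the exception entirely with an up-front catalog check, roughly like the sketch below, where spark and viewName come from my code; but I would still prefer to handle the exception itself cleanly, since a missing view is an expected situation in my recursion.)

// Up-front check instead of catching (sketch only):
val existing =
  if (spark.catalog.tableExists(viewName)) spark.sql(s"SELECT * FROM $viewName")
  else spark.emptyDataFrame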
Exception 2:
org.apache.spark.sql.AnalysisException: Can only star expand struct data types. Attribute: `ArrayBuffer(RELATED)`;
at org.apache.spark.sql.catalyst.analysis.UnresolvedStar.expand(unresolved.scala:321)
at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveReferences$$anonfun$org$apache$spark$sql$catalyst$analysis$Analyzer$ResolveReferences$$buildExpandedProjectList$1.apply(Analyzer.scala:978)
at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveReferences$$anonfun$org$apache$spark$sql$catalyst$analysis$Analyzer$ResolveReferences$$buildExpandedProjectList$1.apply(Analyzer.scala:976)
at scala.collection.TraversableLike$$anonfun$flatMap$1.apply(TraversableLike.scala:241)
at scala.collection.TraversableLike$$anonfun$flatMap$1.apply(TraversableLike.scala:241)
at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
at scala.collection.TraversableLike$class.flatMap(TraversableLike.scala:241)
at scala.collection.AbstractTraversable.flatMap(Traversable.scala:104)
at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveReferences$.org$apache$spark$sql$catalyst$analysis$Analyzer$ResolveReferences$$buildExpandedProjectList(Analyzer.scala:976)
at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveReferences$$anonfun$apply$9.applyOrElse(Analyzer.scala:904)
at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveReferences$$anonfun$apply$9.applyOrElse(Analyzer.scala:899)
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$$anonfun$resolveOperatorsUp$1$$anonfun$apply$1.apply(AnalysisHelper.scala:90)
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$$anonfun$resolveOperatorsUp$1$$anonfun$apply$1.apply(AnalysisHelper.scala:90)
at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:70)
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$$anonfun$resolveOperatorsUp$1.apply(AnalysisHelper.scala:89)
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$$anonfun$resolveOperatorsUp$1.apply(AnalysisHelper.scala:86)
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$.allowInvokingTransformsInAnalyzer(AnalysisHelper.scala:194)
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$class.resolveOperatorsUp(AnalysisHelper.scala:86)
at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.resolveOperatorsUp(LogicalPlan.scala:29)
at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveReferences$.apply(Analyzer.scala:899)
at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveReferences$.apply(Analyzer.scala:756)
at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1$$anonfun$apply$1.apply(RuleExecutor.scala:87)
at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1$$anonfun$apply$1.apply(RuleExecutor.scala:84)
at scala.collection.LinearSeqOptimized$class.foldLeft(LinearSeqOptimized.scala:124)
at scala.collection.immutable.List.foldLeft(List.scala:84)
at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1.apply(RuleExecutor.scala:84)
at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1.apply(RuleExecutor.scala:76)
at scala.collection.immutable.List.foreach(List.scala:392)
at org.apache.spark.sql.catalyst.rules.RuleExecutor.execute(RuleExecutor.scala:76)
at org.apache.spark.sql.catalyst.analysis.Analyzer.org$apache$spark$sql$catalyst$analysis$Analyzer$$executeSameContext(Analyzer.scala:127)
at org.apache.spark.sql.catalyst.analysis.Analyzer.execute(Analyzer.scala:121)
at org.apache.spark.sql.catalyst.analysis.Analyzer$$anonfun$executeAndCheck$1.apply(Analyzer.scala:106)
at org.apache.spark.sql.catalyst.analysis.Analyzer$$anonfun$executeAndCheck$1.apply(Analyzer.scala:105)
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$.markInAnalyzer(AnalysisHelper.scala:201)
at org.apache.spark.sql.catalyst.analysis.Analyzer.executeAndCheck(Analyzer.scala:105)
at org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:57)
at org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:55)
at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:47)
at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:78)
at org.apache.spark.sql.Dataset.org$apache$spark$sql$Dataset$$withPlan(Dataset.scala:3412)
at org.apache.spark.sql.Dataset.select(Dataset.scala:1340)
at org.apache.spark.sql.Dataset.select(Dataset.scala:1358)
at JsonParser$$anonfun$recurse$2.apply(JsonParser.scala:60)
at JsonParser$$anonfun$recurse$2.apply(JsonParser.scala:57)
at scala.collection.immutable.List.foreach(List.scala:392)
at JsonParser$.recurse(JsonParser.scala:57)
at JsonParser$$anonfun$recurse$2.apply(JsonParser.scala:63)
at JsonParser$$anonfun$recurse$2.apply(JsonParser.scala:57)
at scala.collection.immutable.List.foreach(List.scala:392)
at JsonParser$.recurse(JsonParser.scala:57)
at JsonParser$$anonfun$recurse$1.apply(JsonParser.scala:54)
at JsonParser$$anonfun$recurse$1.apply(JsonParser.scala:48)
at scala.collection.immutable.List.foreach(List.scala:392)
at JsonParser$.recurse(JsonParser.scala:48)
at JsonParser$$anonfun$recurse$1.apply(JsonParser.scala:52)
at JsonParser$$anonfun$recurse$1.apply(JsonParser.scala:48)
at scala.collection.immutable.List.foreach(List.scala:392)
at JsonParser$.recurse(JsonParser.scala:48)
at JsonParser$$anonfun$recurse$2.apply(JsonParser.scala:63)
at JsonParser$$anonfun$recurse$2.apply(JsonParser.scala:57)
at scala.collection.immutable.List.foreach(List.scala:392)
at JsonParser$.recurse(JsonParser.scala:57)
at JsonParser$$anonfun$recurse$1.apply(JsonParser.scala:54)
at JsonParser$$anonfun$recurse$1.apply(JsonParser.scala:48)
at scala.collection.immutable.List.foreach(List.scala:392)
at JsonParser$.recurse(JsonParser.scala:48)
at JsonParser$$anonfun$recurse$1.apply(JsonParser.scala:52)
at JsonParser$$anonfun$recurse$1.apply(JsonParser.scala:48)
at scala.collection.immutable.List.foreach(List.scala:392)
at JsonParser$.recurse(JsonParser.scala:48)
at JsonParser$$anonfun$recurse$2.apply(JsonParser.scala:63)
at JsonParser$$anonfun$recurse$2.apply(JsonParser.scala:57)
at scala.collection.immutable.List.foreach(List.scala:392)
at JsonParser$.recurse(JsonParser.scala:57)
at JsonParser$$anonfun$recurse$1.apply(JsonParser.scala:54)
at JsonParser$$anonfun$recurse$1.apply(JsonParser.scala:48)
at scala.collection.immutable.List.foreach(List.scala:392)
at JsonParser$.recurse(JsonParser.scala:48)
at JsonParser$$anonfun$recurse$1.apply(JsonParser.scala:52)
at JsonParser$$anonfun$recurse$1.apply(JsonParser.scala:48)
at scala.collection.immutable.List.foreach(List.scala:392)
at JsonParser$.recurse(JsonParser.scala:48)
at JsonParser$$anonfun$recurse$2.apply(JsonParser.scala:63)
at JsonParser$$anonfun$recurse$2.apply(JsonParser.scala:57)
at scala.collection.immutable.List.foreach(List.scala:392)
at JsonParser$.recurse(JsonParser.scala:57)
at JsonParser$.delayedEndpoint$JsonParser$1(JsonParser.scala:24)
at JsonParser$delayedInit$body.apply(JsonParser.scala:8)
at scala.Function0$class.apply$mcV$sp(Function0.scala:34)
at scala.runtime.AbstractFunction0.apply$mcV$sp(AbstractFunction0.scala:12)
at scala.App$$anonfun$main$1.apply(App.scala:76)
at scala.App$$anonfun$main$1.apply(App.scala:76)
at scala.collection.immutable.List.foreach(List.scala:392)
at scala.collection.generic.TraversableForwarder$class.foreach(TraversableForwarder.scala:35)
at scala.App$class.main(App.scala:76)
at JsonParser$.main(JsonParser.scala:8)
at JsonParser.main(JsonParser.scala)
Is there any way to get at the actual underlying exception (e.g. the NoSuchTableException in Exception 1), other than taking the exception message and searching it for certain string keywords, so that I can handle the two cases in a cleaner way?
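Roughly, I am hoping for something like the following sketch (hypothetical; I don't know whether the cause chain is always populated the way the first stack trace suggests, and Exception 2 shows no "Caused by" at all, which is exactly why I'm asking):

import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.analysis.NoSuchTableException

try {
  // ... Spark code ...
} catch {
  case e: AnalysisException =>
    e.getCause match {
      case _: NoSuchTableException =>
        // case 1: the temp view has not been registered yet
        println(s"View missing: ${e.getMessage}")
      case _ =>
        // case 2 carries no nested cause, so it would still need a
        // separate check (e.g. on the message or the DataFrame schema)
        e.printStackTrace()
    }
}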