在使用 Apache Kyuubi Spark lineage 插件集成 Atlas 的过程中出现报错。我使用的版本是 Spark 3.5.3 + Atlas 2.4.0。
报错内容如下:
```shell
26/02/06 09:17:54 WARN AtlasLineageDispatcher: Send lineage to atlas failed.
java.lang.IllegalStateException: Shutdown in progress, cannot add a shutdownHook
at org.apache.hadoop.util.ShutdownHookManager.addShutdownHook(ShutdownHookManager.java:301) ~[hadoop-client-api-3.4.0-amzn-2.jar:?]
at org.apache.kyuubi.plugin.lineage.dispatcher.atlas.AtlasClient$.registerCleanupShutdownHook(AtlasClient.scala:88) ~[kyuubi-spark-lineage_2.12-1.11.0.jar:1.11.0]
at org.apache.kyuubi.plugin.lineage.dispatcher.atlas.AtlasClient$.getClient(AtlasClient.scala:74) ~[kyuubi-spark-lineage_2.12-1.11.0.jar:1.11.0]
at org.apache.kyuubi.plugin.lineage.dispatcher.atlas.AtlasLineageDispatcher.$anonfun$send$2(AtlasLineageDispatcher.scala:37) ~[kyuubi-spark-lineage_2.12-1.11.0.jar:1.11.0]
at org.apache.kyuubi.plugin.lineage.dispatcher.atlas.AtlasLineageDispatcher.$anonfun$send$2$adapted(AtlasLineageDispatcher.scala:30) ~[kyuubi-spark-lineage_2.12-1.11.0.jar:1.11.0]
at scala.Option.foreach(Option.scala:407) ~[scala-library-2.12.18.jar:?]
at org.apache.kyuubi.plugin.lineage.dispatcher.atlas.AtlasLineageDispatcher.send(AtlasLineageDispatcher.scala:30) ~[kyuubi-spark-lineage_2.12-1.11.0.jar:1.11.0]
at org.apache.kyuubi.plugin.lineage.SparkOperationLineageQueryExecutionListener.$anonfun$onSuccess$1(SparkOperationLineageQueryExecutionListener.scala:35) ~[kyuubi-spark-lineage_2.12-1.11.0.jar:1.11.0]
at org.apache.kyuubi.plugin.lineage.SparkOperationLineageQueryExecutionListener.$anonfun$onSuccess$1$adapted(SparkOperationLineageQueryExecutionListener.scala:35) ~[kyuubi-spark-lineage_2.12-1.11.0.jar:1.11.0]
at scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:62) ~[scala-library-2.12.18.jar:?]
at scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:55) ~[scala-library-2.12.18.jar:?]
at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:49) ~[scala-library-2.12.18.jar:?]
at org.apache.kyuubi.plugin.lineage.SparkOperationLineageQueryExecutionListener.onSuccess(SparkOperationLineageQueryExecutionListener.scala:35) ~[kyuubi-spark-lineage_2.12-1.11.0.jar:1.11.0]
at org.apache.spark.sql.util.ExecutionListenerBus.doPostEvent(QueryExecutionListener.scala:173) ~[spark-sql_2.12-3.5.3-amzn-0.jar:3.5.3-amzn-0]
at org.apache.spark.sql.util.ExecutionListenerBus.doPostEvent(QueryExecutionListener.scala:143) ~[spark-sql_2.12-3.5.3-amzn-0.jar:3.5.3-amzn-0]
at org.apache.spark.util.ListenerBus.postToAll(ListenerBus.scala:117) ~[spark-core_2.12-3.5.3-amzn-0.jar:3.5.3-amzn-0]
at org.apache.spark.util.ListenerBus.postToAll$(ListenerBus.scala:101) ~[spark-core_2.12-3.5.3-amzn-0.jar:3.5.3-amzn-0]
at org.apache.spark.sql.util.ExecutionListenerBus.postToAll(QueryExecutionListener.scala:143) ~[spark-sql_2.12-3.5.3-amzn-0.jar:3.5.3-amzn-0]
at org.apache.spark.sql.util.ExecutionListenerBus.onOtherEvent(QueryExecutionListener.scala:155) ~[spark-sql_2.12-3.5.3-amzn-0.jar:3.5.3-amzn-0]
at org.apache.spark.scheduler.SparkListenerBus.doPostEvent(SparkListenerBus.scala:100) ~[spark-core_2.12-3.5.3-amzn-0.jar:3.5.3-amzn-0]
at org.apache.spark.scheduler.SparkListenerBus.doPostEvent$(SparkListenerBus.scala:28) ~[spark-core_2.12-3.5.3-amzn-0.jar:3.5.3-amzn-0]
at org.apache.spark.scheduler.AsyncEventQueue.doPostEvent(AsyncEventQueue.scala:37) ~[spark-core_2.12-3.5.3-amzn-0.jar:3.5.3-amzn-0]
at org.apache.spark.scheduler.AsyncEventQueue.doPostEvent(AsyncEventQueue.scala:37) ~[spark-core_2.12-3.5.3-amzn-0.jar:3.5.3-amzn-0]
at org.apache.spark.util.ListenerBus.postToAll(ListenerBus.scala:117) ~[spark-core_2.12-3.5.3-amzn-0.jar:3.5.3-amzn-0]
at org.apache.spark.util.ListenerBus.postToAll$(ListenerBus.scala:101) ~[spark-core_2.12-3.5.3-amzn-0.jar:3.5.3-amzn-0]
at org.apache.spark.scheduler.AsyncEventQueue.super$postToAll(AsyncEventQueue.scala:105) ~[spark-core_2.12-3.5.3-amzn-0.jar:3.5.3-amzn-0]
at org.apache.spark.scheduler.AsyncEventQueue.$anonfun$dispatch$1(AsyncEventQueue.scala:105) ~[spark-core_2.12-3.5.3-amzn-0.jar:3.5.3-amzn-0]
at scala.runtime.java8.JFunction0$mcJ$sp.apply(JFunction0$mcJ$sp.java:23) ~[scala-library-2.12.18.jar:?]
at scala.util.DynamicVariable.withValue(DynamicVariable.scala:62) ~[scala-library-2.12.18.jar:?]
at org.apache.spark.scheduler.AsyncEventQueue.org$apache$spark$scheduler$AsyncEventQueue$$dispatch(AsyncEventQueue.scala:100) ~[spark-core_2.12-3.5.3-amzn-0.jar:3.5.3-amzn-0]
at org.apache.spark.scheduler.AsyncEventQueue$$anon$2.$anonfun$run$1(AsyncEventQueue.scala:96) ~[spark-core_2.12-3.5.3-amzn-0.jar:3.5.3-amzn-0]
at org.apache.spark.util.Utils$.tryOrStopSparkContext(Utils.scala:1358) [spark-core_2.12-3.5.3-amzn-0.jar:3.5.3-amzn-0]
at org.apache.spark.scheduler.AsyncEventQueue$$anon$2.run(AsyncEventQueue.scala:96) [spark-core_2.12-3.5.3-amzn-0.jar:3.5.3-amzn-0]
```