问题遇到的现象和发生背景
在 IDEA 上编写代码，打成 jar 包后上传到 Spark 集群上运行时报错。
问题相关代码（请勿粘贴截图）
spark-submit --master spark://&lt;master主机名&gt;:7077 --class cd.itcast.Test1 /Daobao.jar
（注：原帖命令中 master URL 写作 spark://7077，缺少主机名；standalone 模式的 master URL 格式应为 spark://host:port。）
运行结果及报错内容
Exception in thread "main" java.lang.IllegalArgumentException: Wrong FS: file://spark-warehouse, expected: file:///
at org.apache.hadoop.fs.FileSystem.checkPath(FileSystem.java:645)
at org.apache.hadoop.fs.FileSystem.makeQualified(FileSystem.java:465)
at org.apache.hadoop.fs.FilterFileSystem.makeQualified(FilterFileSystem.java:119)
at org.apache.spark.sql.catalyst.catalog.SessionCatalog.makeQualifiedPath(SessionCatalog.scala:116)
at org.apache.spark.sql.catalyst.catalog.SessionCatalog.createDatabase(SessionCatalog.scala:145)
at org.apache.spark.sql.catalyst.catalog.SessionCatalog.<init>(SessionCatalog.scala:89)
at org.apache.spark.sql.internal.SessionState.catalog$lzycompute(SessionState.scala:95)
at org.apache.spark.sql.internal.SessionState.catalog(SessionState.scala:95)
at org.apache.spark.sql.internal.SessionState$$anon$1.<init>(SessionState.scala:112)
at org.apache.spark.sql.internal.SessionState.analyzer$lzycompute(SessionState.scala:112)
at org.apache.spark.sql.internal.SessionState.analyzer(SessionState.scala:111)
at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:49)
at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:64)
at org.apache.spark.sql.SparkSession.baseRelationToDataFrame(SparkSession.scala:382)
at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:143)
at org.apache.spark.sql.DataFrameReader.json(DataFrameReader.scala:287)
at org.apache.spark.sql.DataFrameReader.json(DataFrameReader.scala:249)
at it.cast.ddtis$.main(ddtis.scala:16)
at it.cast.ddtis.main(ddtis.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:729)
at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:185)
at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:210)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:124)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
21/12/30 21:17:24 INFO SparkContext: Invoking stop() from shutdown hook
21/12/30 21:17:24 INFO SparkUI: Stopped Spark web UI at http://192.168.226.128:4040
21/12/30 21:17:24 INFO MapOutputTrackerMasterEndpoint: MapOutputTrackerMasterEndpoint stopped!
21/12/30 21:17:24 INFO MemoryStore: MemoryStore cleared
21/12/30 21:17:24 INFO BlockManager: BlockManager stopped
21/12/30 21:17:24 INFO BlockManagerMaster: BlockManagerMaster stopped
21/12/30 21:17:24 INFO OutputCommitCoordinator$OutputCommitCoordinatorEndpoint: OutputCommitCoordinator stopped!
21/12/30 21:17:24 INFO SparkContext: Successfully stopped SparkContext
21/12/30 21:17:24 INFO ShutdownHookManager: Shutdown hook called
21/12/30 21:17:24 INFO ShutdownHookManager: Deleting directory /tmp/spark-42087f9a-e754-4d65-8d4a-63c414c44c71