scala> carbon.sql("CREATE TABLE IF NOT EXISTS test_table(id string,name string,city string,age Int) STORED BY 'carbondata'")
18/03/01 22:35:12 AUDIT CarbonCreateTableCommand: [localhost.localdomain][root][Thread-1]Creating Table with Database name [default] and Table name [test_table]
res9: org.apache.spark.sql.DataFrame = []
scala> carbon.sql("SELECT * FROM test_table").show()
+---+----+----+---+
| id|name|city|age|
+---+----+----+---+
+---+----+----+---+
scala> carbon.sql("LOAD DATA INPATH '/opt/hadoop/sample.csv' INTO TABLE test_table")
18/03/01 22:43:51 ERROR CarbonLoaderUtil: main Not able to acquire the lock for Table status updation for table default.test_table
18/03/01 22:43:51 ERROR CarbonLoaderUtil: main Unable to unlock Table lock for tabledefault.test_table during table status updation
18/03/01 22:43:51 ERROR CarbonLoadDataCommand: main
java.io.IOException: Dataload failed due to failure in table status updation for test_table
at org.apache.carbondata.processing.util.CarbonLoaderUtil.readAndUpdateLoadProgressInTableMeta(CarbonLoaderUtil.java:443)
at org.apache.carbondata.processing.util.CarbonLoaderUtil.readAndUpdateLoadProgressInTableMeta(CarbonLoaderUtil.java:449)
at org.apache.spark.sql.execution.command.management.CarbonLoadDataCommand.processData(CarbonLoadDataCommand.scala:236)
at org.apache.spark.sql.execution.command.AtomicRunnableCommand.run(package.scala:92)
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:58)
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:56)
at org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:67)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:183)
at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:68)
at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:632)
at $line40.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:33)
at $line40.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:38)
at $line40.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:40)
at $line40.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:42)
at $line40.$read$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:44)
at $line40.$read$$iw$$iw$$iw$$iw$$iw.<init>(<console>:46)
at $line40.$read$$iw$$iw$$iw$$iw.<init>(<console>:48)
at $line40.$read$$iw$$iw$$iw.<init>(<console>:50)
at $line40.$read$$iw$$iw.<init>(<console>:52)
at $line40.$read$$iw.<init>(<console>:54)
at $line40.$read.<init>(<console>:56)
at $line40.$read$.<init>(<console>:60)
at $line40.$read$.<clinit>()
at $line40.$eval$.$print$lzycompute(<console>:7)
at $line40.$eval$.$print(<console>:6)
at $line40.$eval.$print()
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at scala.tools.nsc.interpreter.IMain$ReadEvalPrint.call(IMain.scala:786)
at scala.tools.nsc.interpreter.IMain$Request.loadAndRun(IMain.scala:1047)
at scala.tools.nsc.interpreter.IMain$WrappedRequest$$anonfun$loadAndRunReq$1.apply(IMain.scala:638)
at scala.tools.nsc.interpreter.IMain$WrappedRequest$$anonfun$loadAndRunReq$1.apply(IMain.scala:637)
at scala.reflect.internal.util.ScalaClassLoader$class.asContext(ScalaClassLoader.scala:31)
at scala.reflect.internal.util.AbstractFileClassLoader.asContext(AbstractFileClassLoader.scala:19)
at scala.tools.nsc.interpreter.IMain$WrappedRequest.loadAndRunReq(IMain.scala:637)
at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:569)
at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:565)
at scala.tools.nsc.interpreter.ILoop.interpretStartingWith(ILoop.scala:807)
at scala.tools.nsc.interpreter.ILoop.command(ILoop.scala:681)
at scala.tools.nsc.interpreter.ILoop.processLine(ILoop.scala:395)
at scala.tools.nsc.interpreter.ILoop.loop(ILoop.scala:415)
at scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply$mcZ$sp(ILoop.scala:923)
at scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply(ILoop.scala:909)
at scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply(ILoop.scala:909)
at scala.reflect.internal.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:97)
at scala.tools.nsc.interpreter.ILoop.process(ILoop.scala:909)
at org.apache.spark.repl.Main$.doMain(Main.scala:74)
at org.apache.spark.repl.Main$.main(Main.scala:54)
at org.apache.spark.repl.Main.main(Main.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
18/03/01 22:43:51 AUDIT CarbonLoadDataCommand: [localhost.localdomain][root][Thread-1]Dataload failure for default.test_table. Please check the logs
18/03/01 22:43:51 ERROR CarbonLoadDataCommand: main Got exception java.io.IOException: Dataload failed due to failure in table status updation for test_table when processing data. But this command does not support undo yet, skipping the undo part.
java.io.IOException: Dataload failed due to failure in table status updation for test_table
at org.apache.carbondata.processing.util.CarbonLoaderUtil.readAndUpdateLoadProgressInTableMeta(CarbonLoaderUtil.java:443)
at org.apache.carbondata.processing.util.CarbonLoaderUtil.readAndUpdateLoadProgressInTableMeta(CarbonLoaderUtil.java:449)
at org.apache.spark.sql.execution.command.management.CarbonLoadDataCommand.processData(CarbonLoadDataCommand.scala:236)
at org.apache.spark.sql.execution.command.AtomicRunnableCommand.run(package.scala:92)
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:58)
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:56)
at org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:67)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:183)
at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:68)
at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:632)
... 50 elided