UnsatisfiedLinkError when creating Hive partitions in Spark

Date: 2016-12-16 10:09:21

Tags: scala hadoop apache-spark hive

I am using a Hive context in Spark with Scala.

I am creating a Hive external table with partitions, but I get this error:

org.apache.hadoop.util.NativeCrc32.nativeComputeChunkedSumsByteArray(II[BI[BIILjava/lang/String;JZ)V

I can create the external table without partitions just fine; it is only creating it with partitions that fails. This did work two days ago, so I am quite confused. My development environment is Windows, and I run Hadoop with hadoop-common-2.2.0.
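
For reference, here is a minimal Scala sketch of the kind of statements involved. The table name r_table and the MSCK REPAIR TABLE call appear in the log below; the column names, partition column, delimiter, and LOCATION path are placeholders, since the actual DDL is not shown here:

    import org.apache.spark.{SparkConf, SparkContext}
    import org.apache.spark.sql.hive.HiveContext

    object PartitionedTableRepro {
      def main(args: Array[String]): Unit = {
        val sc = new SparkContext(
          new SparkConf().setAppName("PartitionedTableRepro").setMaster("local[*]"))
        val hiveContext = new HiveContext(sc)

        // Creating the external table itself succeeds (as it does without partitions).
        hiveContext.sql(
          """CREATE EXTERNAL TABLE IF NOT EXISTS r_table (
            |  id STRING,
            |  value STRING
            |)
            |PARTITIONED BY (load_date STRING)
            |ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
            |LOCATION '/data/r_table'""".stripMargin)

        // Registering the partition directories is where the
        // java.lang.UnsatisfiedLinkError (NativeCrc32) is thrown on Windows.
        hiveContext.sql("MSCK REPAIR TABLE r_table")
      }
    }
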

Here is more of the stack trace:

15:32:43,702  WARN hive.log log:327 - Updating partition stats fast for: r_table
15:32:43,708  WARN hive.log log:330 - Updated size to 307
15:32:43,714  WARN hive.log log:327 - Updating partition stats fast for: r_table
15:32:43,719  WARN hive.log log:330 - Updated size to 307
15:32:43,855  INFO org.apache.hadoop.hive.ql.log.PerfLogger PerfLogger:163 - </PERFLOG method=add_partitions_req start=1481815963617 end=1481815963855 duration=238 from=org.apache.hadoop.hive.metastore.RetryingHMSHandler threadId=0 retryCount=0 error=false>
15:32:43,896 ERROR hive.ql.exec.DDLTask DDLTask:517 - java.lang.UnsatisfiedLinkError: org.apache.hadoop.util.NativeCrc32.nativeComputeChunkedSumsByteArray(II[BI[BIILjava/lang/String;JZ)V
    at org.apache.hadoop.util.NativeCrc32.nativeComputeChunkedSumsByteArray(Native Method)
    at org.apache.hadoop.util.NativeCrc32.calculateChunkedSumsByteArray(NativeCrc32.java:86)
    at org.apache.hadoop.util.DataChecksum.calculateChunkedSums(DataChecksum.java:430)
    at org.apache.hadoop.fs.FSOutputSummer.writeChecksumChunks(FSOutputSummer.java:207)
    at org.apache.hadoop.fs.FSOutputSummer.flushBuffer(FSOutputSummer.java:164)
    at org.apache.hadoop.fs.FSOutputSummer.flushBuffer(FSOutputSummer.java:145)
    at org.apache.hadoop.fs.ChecksumFileSystem$ChecksumFSOutputSummer.close(ChecksumFileSystem.java:400)
    at org.apache.hadoop.fs.FSDataOutputStream$PositionCache.close(FSDataOutputStream.java:72)
    at org.apache.hadoop.fs.FSDataOutputStream.close(FSDataOutputStream.java:106)
    at sun.nio.cs.StreamEncoder.implClose(StreamEncoder.java:320)
    at sun.nio.cs.StreamEncoder.close(StreamEncoder.java:149)
    at java.io.OutputStreamWriter.close(OutputStreamWriter.java:233)
    at java.io.BufferedWriter.close(BufferedWriter.java:266)
    at org.apache.hadoop.hive.ql.exec.DDLTask.msck(DDLTask.java:1759)
    at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:367)
    at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:160)
    at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:100)
    at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:1774)
    at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:1531)
    at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1311)
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1120)
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1108)
    at org.apache.spark.sql.hive.client.ClientWrapper$$anonfun$runHive$1.apply(ClientWrapper.scala:486)
    at org.apache.spark.sql.hive.client.ClientWrapper$$anonfun$runHive$1.apply(ClientWrapper.scala:475)
    at org.apache.spark.sql.hive.client.ClientWrapper$$anonfun$withHiveState$1.apply(ClientWrapper.scala:281)
    at org.apache.spark.sql.hive.client.ClientWrapper.liftedTree1$1(ClientWrapper.scala:228)
    at org.apache.spark.sql.hive.client.ClientWrapper.retryLocked(ClientWrapper.scala:227)
    at org.apache.spark.sql.hive.client.ClientWrapper.withHiveState(ClientWrapper.scala:270)
    at org.apache.spark.sql.hive.client.ClientWrapper.runHive(ClientWrapper.scala:475)
    at org.apache.spark.sql.hive.client.ClientWrapper.runSqlHive(ClientWrapper.scala:465)
    at org.apache.spark.sql.hive.HiveContext.runSqlHive(HiveContext.scala:607)
    at org.apache.spark.sql.hive.execution.HiveNativeCommand.run(HiveNativeCommand.scala:33)
    at org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult$lzycompute(commands.scala:58)
    at org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult(commands.scala:56)
    at org.apache.spark.sql.execution.ExecutedCommand.doExecute(commands.scala:70)
    at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:132)
    at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:130)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
    at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:130)
    at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:55)
    at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:55)
    at org.apache.spark.sql.DataFrame.<init>(DataFrame.scala:145)
    at org.apache.spark.sql.DataFrame.<init>(DataFrame.scala:130)
    at org.apache.spark.sql.DataFrame$.apply(DataFrame.scala:52)
    at org.apache.spark.sql.SQLContext.sql(SQLContext.scala:817)
    at com.bte.commons.test.TestUtils$class.repairTable(TestUtils.scala:84)
    at com.bte.enrichment.base.B_HUB_INVENTORY_SparkTest.repairTable(B_HUB_INVENTORY_SparkTest.scala:18)
    at com.bte.enrichment.base.B_HUB_INVENTORY_SparkTest$$anonfun$41.apply$mcV$sp(B_HUB_INVENTORY_SparkTest.scala:1486)
    at com.bte.enrichment.base.B_HUB_INVENTORY_SparkTest$$anonfun$41.apply(B_HUB_INVENTORY_SparkTest.scala:1449)
    at com.bte.enrichment.base.B_HUB_INVENTORY_SparkTest$$anonfun$41.apply(B_HUB_INVENTORY_SparkTest.scala:1449)
    at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
    at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
    at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
    at org.scalatest.Transformer.apply(Transformer.scala:22)
    at org.scalatest.Transformer.apply(Transformer.scala:20)
    at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
    at org.scalatest.Suite$class.withFixture(Suite.scala:1122)
    at org.scalatest.FunSuite.withFixture(FunSuite.scala:1555)
    at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
    at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
    at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
    at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
    at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
    at com.bte.enrichment.base.B_HUB_INVENTORY_SparkTest.org$scalatest$BeforeAndAfter$$super$runTest(B_HUB_INVENTORY_SparkTest.scala:18)
    at org.scalatest.BeforeAndAfter$class.runTest(BeforeAndAfter.scala:200)
    at com.bte.enrichment.base.B_HUB_INVENTORY_SparkTest.runTest(B_HUB_INVENTORY_SparkTest.scala:18)
    at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
    at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
    at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
    at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
    at scala.collection.immutable.List.foreach(List.scala:318)
    at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
    at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
    at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
    at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
    at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
    at org.scalatest.Suite$class.run(Suite.scala:1424)
    at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
    at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
    at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
    at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
    at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
    at com.bte.enrichment.base.B_HUB_INVENTORY_SparkTest.org$scalatest$BeforeAndAfterAll$$super$run(B_HUB_INVENTORY_SparkTest.scala:18)
    at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
    at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
    at com.bte.enrichment.base.B_HUB_INVENTORY_SparkTest.org$scalatest$BeforeAndAfter$$super$run(B_HUB_INVENTORY_SparkTest.scala:18)
    at org.scalatest.BeforeAndAfter$class.run(BeforeAndAfter.scala:241)
    at com.bte.enrichment.base.B_HUB_INVENTORY_SparkTest.run(B_HUB_INVENTORY_SparkTest.scala:18)
    at org.scalatest.junit.JUnitRunner.run(JUnitRunner.scala:99)
    at org.junit.runner.JUnitCore.run(JUnitCore.java:160)
    at com.intellij.junit4.JUnit4IdeaTestRunner.startRunnerWithArgs(JUnit4IdeaTestRunner.java:117)
    at com.intellij.junit4.JUnit4IdeaTestRunner.startRunnerWithArgs(JUnit4IdeaTestRunner.java:42)
    at com.intellij.rt.execution.junit.JUnitStarter.prepareStreamsAndStart(JUnitStarter.java:262)
    at com.intellij.rt.execution.junit.JUnitStarter.main(JUnitStarter.java:84)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:606)
    at com.intellij.rt.execution.application.AppMain.main(AppMain.java:147)

15:32:43,902 ERROR org.apache.hadoop.hive.ql.Driver Driver:936 - FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. org.apache.hadoop.util.NativeCrc32.nativeComputeChunkedSumsByteArray(II[BI[BIILjava/lang/String;JZ)V
15:32:43,903 DEBUG org.apache.hadoop.hive.ql.Driver Driver:132 - Shutting down query MSCK REPAIR TABLE default.R_TABLE
15:32:43,904  INFO org.apache.hadoop.hive.ql.log.PerfLogger PerfLogger:163 - </PERFLOG method=Driver.execute start=1481815963415 end=1481815963904 duration=489 from=org.apache.hadoop.hive.ql.Driver>
15:32:43,904  INFO hive.ql.metadata.Hive Hive:3241 - Dumping metastore api call timing information for : execution phase
15:32:43,905 DEBUG hive.ql.metadata.Hive Hive:3218 - Total time spent in each metastore function (ms): {isCompatibleWith_(HiveConf, )=0, add_partitions_(List, boolean, boolean, )=239, listPartitionsWithAuthInfo_(String, String, short, String, List, )=48, getTable_(String, String, )=101}
15:32:43,906  INFO org.apache.hadoop.hive.ql.Driver Driver:1696 - Completed executing command(queryId=608367024_20161215153131_eff9896d-a5a2-41cb-803a-b8cd3c547998); Time taken: 0.489 seconds
15:32:43,906  INFO org.apache.hadoop.hive.ql.log.PerfLogger PerfLogger:122 - <PERFLOG method=releaseLocks from=org.apache.hadoop.hive.ql.Driver>
15:32:43,907  INFO org.apache.hadoop.hive.ql.log.PerfLogger PerfLogger:163 - </PERFLOG method=releaseLocks start=1481815963906 end=1481815963907 duration=1 from=org.apache.hadoop.hive.ql.Driver>
15:32:43,907 DEBUG org.apache.hadoop.hive.ql.Driver Driver:132 - Shutting down query MSCK REPAIR TABLE default.R_TABLE
15:32:43,926  WARN org.apache.hadoop.fs.FileUtil FileUtil:190 - Failed to delete file or dir [C:\Users\608367~1\AppData\Local\Temp\608367024\7e86079a-9887-4ccc-92bc-6264a3a800d2\hive_2016-12-15_15-32-43_390_5997593973333834654-1\-local-10000]: it still exists.
15:32:43,928  WARN org.apache.hadoop.fs.FileUtil FileUtil:190 - Failed to delete file or dir [C:\Users\608367~1\AppData\Local\Temp\608367024\7e86079a-9887-4ccc-92bc-6264a3a800d2\hive_2016-12-15_15-32-43_390_5997593973333834654-1\.-local-10000.crc]: it still exists.
15:32:43,965 ERROR org.apache.spark.sql.hive.client.ClientWrapper ClientWrapper:74 - 
======================
HIVE FAILURE OUTPUT
======================
SET hive.exec.dynamic.partition=true
SET hive.exec.dynamic.partition.mode=nonstrict
SET hive.mapred.mode=strict
OK
OK
OK
...
FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. org.apache.hadoop.util.NativeCrc32.nativeComputeChunkedSumsByteArray(II[BI[BIILjava/lang/String;JZ)V

======================
END HIVE FAILURE OUTPUT
======================


org.apache.spark.sql.execution.QueryExecutionException: FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. org.apache.hadoop.util.NativeCrc32.nativeComputeChunkedSumsByteArray(II[BI[BIILjava/lang/String;JZ)V

    at org.apache.spark.sql.hive.client.ClientWrapper$$anonfun$runHive$1.apply(ClientWrapper.scala:490)
    at org.apache.spark.sql.hive.client.ClientWrapper$$anonfun$runHive$1.apply(ClientWrapper.scala:475)
    at org.apache.spark.sql.hive.client.ClientWrapper$$anonfun$withHiveState$1.apply(ClientWrapper.scala:281)
    at org.apache.spark.sql.hive.client.ClientWrapper.liftedTree1$1(ClientWrapper.scala:228)
    at org.apache.spark.sql.hive.client.ClientWrapper.retryLocked(ClientWrapper.scala:227)
    at org.apache.spark.sql.hive.client.ClientWrapper.withHiveState(ClientWrapper.scala:270)
    at org.apache.spark.sql.hive.client.ClientWrapper.runHive(ClientWrapper.scala:475)
    at org.apache.spark.sql.hive.client.ClientWrapper.runSqlHive(ClientWrapper.scala:465)
    at org.apache.spark.sql.hive.HiveContext.runSqlHive(HiveContext.scala:607)
    at org.apache.spark.sql.hive.execution.HiveNativeCommand.run(HiveNativeCommand.scala:33)
    at org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult$lzycompute(commands.scala:58)
    at org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult(commands.scala:56)
    at org.apache.spark.sql.execution.ExecutedCommand.doExecute(commands.scala:70)
    at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:132)
    at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:130)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
    at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:130)
    at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:55)
    at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:55)
    at org.apache.spark.sql.DataFrame.<init>(DataFrame.scala:145)
    at org.apache.spark.sql.DataFrame.<init>(DataFrame.scala:130)
    at org.apache.spark.sql.DataFrame$.apply(DataFrame.scala:52)
    at org.apache.spark.sql.SQLContext.sql(SQLContext.scala:817)
    at com.bte.commons.test.TestUtils$class.repairTable(TestUtils.scala:84)
    at com.bte.enrichment.base.B_HUB_INVENTORY_SparkTest.repairTable(B_HUB_INVENTORY_SparkTest.scala:18)
    at com.bte.enrichment.base.B_HUB_INVENTORY_SparkTest$$anonfun$41.apply$mcV$sp(B_HUB_INVENTORY_SparkTest.scala:1486)
    at com.bte.enrichment.base.B_HUB_INVENTORY_SparkTest$$anonfun$41.apply(B_HUB_INVENTORY_SparkTest.scala:1449)
    at com.bte.enrichment.base.B_HUB_INVENTORY_SparkTest$$anonfun$41.apply(B_HUB_INVENTORY_SparkTest.scala:1449)
    at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
    at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
    at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
    at org.scalatest.Transformer.apply(Transformer.scala:22)
    at org.scalatest.Transformer.apply(Transformer.scala:20)
    at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
    at org.scalatest.Suite$class.withFixture(Suite.scala:1122)
    at org.scalatest.FunSuite.withFixture(FunSuite.scala:1555)
    at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
    at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
    at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
    at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
    at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
    at com.bte.enrichment.base.B_HUB_INVENTORY_SparkTest.org$scalatest$BeforeAndAfter$$super$runTest(B_HUB_INVENTORY_SparkTest.scala:18)
    at org.scalatest.BeforeAndAfter$class.runTest(BeforeAndAfter.scala:200)
    at com.bte.enrichment.base.B_HUB_INVENTORY_SparkTest.runTest(B_HUB_INVENTORY_SparkTest.scala:18)
    at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
    at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
    at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
    at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
    at scala.collection.immutable.List.foreach(List.scala:318)
    at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
    at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
    at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
    at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
    at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
    at org.scalatest.Suite$class.run(Suite.scala:1424)
    at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
    at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
    at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
    at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
    at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
    at com.bte.enrichment.base.B_HUB_INVENTORY_SparkTest.org$scalatest$BeforeAndAfterAll$$super$run(B_HUB_INVENTORY_SparkTest.scala:18)
    at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
    at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
    at com.bte.enrichment.base.B_HUB_INVENTORY_SparkTest.org$scalatest$BeforeAndAfter$$super$run(B_HUB_INVENTORY_SparkTest.scala:18)
    at org.scalatest.BeforeAndAfter$class.run(BeforeAndAfter.scala:241)
    at com.bte.enrichment.base.B_HUB_INVENTORY_SparkTest.run(B_HUB_INVENTORY_SparkTest.scala:18)
    at org.scalatest.junit.JUnitRunner.run(JUnitRunner.scala:99)
    at org.junit.runner.JUnitCore.run(JUnitCore.java:160)
    at com.intellij.junit4.JUnit4IdeaTestRunner.startRunnerWithArgs(JUnit4IdeaTestRunner.java:117)
    at com.intellij.junit4.JUnit4IdeaTestRunner.startRunnerWithArgs(JUnit4IdeaTestRunner.java:42)
    at com.intellij.rt.execution.junit.JUnitStarter.prepareStreamsAndStart(JUnitStarter.java:262)
    at com.intellij.rt.execution.junit.JUnitStarter.main(JUnitStarter.java:84)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:606)
    at com.intellij.rt.execution.application.AppMain.main(AppMain.java:147)

0 Answers:

No answers yet.