Running a Spark application in YARN mode on an EMR cluster

Date: 2018-08-08 21:21:39

Tags: java apache-spark drools amazon-emr kie

I ran this code and it threw a NullPointerException. When I dug deeper, I found that it was unable to create the object in the KieSession.


Code:

package com.rsrit.cob.drools;

import java.io.BufferedWriter;
import java.io.OutputStreamWriter;
import java.io.Serializable;

import org.kie.api.runtime.StatelessKieSession;
import org.kie.internal.command.CommandFactory;

import com.rsrit.cob.Variables.ClaimInfo;
import com.rsrit.cob.drools.KieSessionFactory;

@SuppressWarnings("serial")
public class RulesExecutor implements Serializable{
    public static BufferedWriter log = new BufferedWriter(new OutputStreamWriter(System.out)); 
    @SuppressWarnings("unchecked")
    public ClaimInfo evalRules(ClaimInfo claimObj,String ruleFileLoc){
        if (ruleFileLoc != null){
        StatelessKieSession ksession = KieSessionFactory.getKieSession(ruleFileLoc);
        ksession.execute(CommandFactory.newInsert(claimObj));
        }else{
            try{
            log.write("Rules File Location is Invalid or Null\n");
             log.flush();
            }catch(Exception e){
                e.printStackTrace();
            }
        }
        return claimObj;
    }
    /*public static String ruleFileConnection(String _ruleFileLoc){

        try{
            String rulesPath = _ruleFileLoc;
            ClassLoader loader =Thread.currentThread().getContextClassLoader();
            Properties props = new Properties();
            try(InputStream rulesLocStream = loader.getResourceAsStream(rulesPath)){
                props.load(rulesLocStream);
            }
            return props.getProperty("ruleFileLoc");

        } catch (FileNotFoundException ex) {
            return null;
        } catch (IOException ex) {
            return null;
        }
    }*/


}
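A likely trigger for the NullPointerException in the stack trace below is KieSessionFactory.getKieSession(ruleFileLoc) returning null on the executor, so that ksession.execute(...) dereferences null. A minimal defensive sketch (assuming getKieSession signals failure by returning null rather than throwing) that would replace the opaque NPE with a descriptive error:

    StatelessKieSession ksession = KieSessionFactory.getKieSession(ruleFileLoc);
    if (ksession == null) {
        // Fail fast with the offending path instead of an NPE deep inside a Spark task
        throw new IllegalStateException("Could not create StatelessKieSession from: " + ruleFileLoc);
    }
    ksession.execute(CommandFactory.newInsert(claimObj));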


Command: spark-submit --class com.rsrit.cob.application.RecoverableClaimsMain molinaHealthcare-yarn.jar ClaimsCompleteInfo.txt CompleteMembersInfo.txt rules.drl
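In YARN mode the tasks run in executor JVMs on other cluster nodes, so a bare relative path like rules.drl is typically not readable there even when it exists on the machine that ran spark-submit. One common way to make the file available, sketched here as an assumption about the cause rather than a confirmed fix, is to ship it with spark-submit --files rules.drl and then resolve the staged copy inside the task:

    import org.apache.spark.SparkFiles;

    // Files passed via --files (or SparkContext.addFile) are staged in each
    // executor's working directory; SparkFiles.get returns their local path.
    String localRulesPath = SparkFiles.get("rules.drl");
    StatelessKieSession ksession = KieSessionFactory.getKieSession(localRulesPath);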


Driver stack trace:
    at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1753)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1741)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1740)
    at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
    at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
    at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1740)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:871)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:871)
    at scala.Option.foreach(Option.scala:257)
    at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:871)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:1974)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1923)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1912)
    at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)
    at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:682)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2034)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2055)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2074)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2099)
    at org.apache.spark.rdd.RDD.count(RDD.scala:1162)
    at org.apache.spark.api.java.JavaRDDLike$class.count(JavaRDDLike.scala:455)
    at org.apache.spark.api.java.AbstractJavaRDDLike.count(JavaRDDLike.scala:45)
    at com.rsrit.cob.application.RecoverableClaimsMain.main(RecoverableClaimsMain.java:169)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
    at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:894)
    at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:198)
    at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:228)
    at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:137)
    at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Caused by: java.lang.NullPointerException
    at com.rsrit.cob.drools.RulesExecutor.evalRules(RulesExecutor.java:20)
    at com.rsrit.cob.application.RecoverableClaimsMain.lambda$main$bc755edb$1(RecoverableClaimsMain.java:165)
    at org.apache.spark.api.java.JavaPairRDD$$anonfun$toScalaFunction$1.apply(JavaPairRDD.scala:1040)
    at scala.collection.Iterator$$anon$11.next(Iterator.scala:409)
    at scala.collection.Iterator$$anon$13.hasNext(Iterator.scala:462)
    at org.apache.spark.util.Utils$.getIteratorSize(Utils.scala:1836)
    at org.apache.spark.rdd.RDD$$anonfun$count$1.apply(RDD.scala:1162)
    at org.apache.spark.rdd.RDD$$anonfun$count$1.apply(RDD.scala:1162)
    at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:2074)
    at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:2074)
    at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
    at org.apache.spark.scheduler.Task.run(Task.scala:109)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:345)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
    at java.lang.Thread.run(Thread.java:748)
18/08/08 21:04:06 INFO SparkContext: Invoking stop() from shutdown hook

0 Answers
