This is my earlier post: Created drools Kie session and I get this "class not found exception", where I was able to generate logs using a "KieRuntimeLogger". Now I am trying to move on with another example, in which I used plain print statements to show the output on screen. When I run the code locally it works fine, but when I run it in YARN mode (the application runs on Spark) the print statements are not shown. So I want to create logs so that I can track which rules from the .drl file fired against which object. I was able to do this in the example above (the linked post), but when I try to do the same in this example I get an error.
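For reference, this is roughly the logging pattern from my earlier post that I am trying to reuse here: attach a KieRuntimeLogger to the session so the fired rules end up in an audit file. This is only a sketch (the file path is just an example), but it is what I mean by "creating logs":

// Sketch: attach a KieRuntimeLogger so the fired rules are written to an audit file.
KieSession ksession = kContainer.newKieSession("ksession-rule");
KieRuntimeLogger logger = KieServices.Factory.get().getLoggers()
        .newFileLogger(ksession, "C://Users/katuk/eclipse-workspace/test");
try {
    ksession.insert(claimObj);
    ksession.fireAllRules();
} finally {
    logger.close();     // close the logger so the audit file gets flushed
    ksession.dispose();
}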
RulesExecutor.java
package com.rsrit.cob.drools;
import java.io.BufferedWriter;
import java.io.OutputStreamWriter;
import java.io.Serializable;
import org.kie.api.KieServices;
import org.kie.api.logger.KieRuntimeLogger;
import org.kie.api.runtime.KieContainer;
import org.kie.api.runtime.KieSession;
import org.kie.api.runtime.StatelessKieSession;
import org.kie.internal.command.CommandFactory;
import com.rsrit.cob.Variables.ClaimInfo;
import com.rsrit.cob.drools.KieSessionFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@SuppressWarnings("serial")
public class RulesExecutor implements Serializable{
KieServices ks = KieServices.Factory.get();
KieContainer kContainer = ks.getKieClasspathContainer();
public static BufferedWriter log = new BufferedWriter(new OutputStreamWriter(System.out));
@SuppressWarnings("unchecked")
public ClaimInfo evalRules(ClaimInfo claimObj,String ruleFileLoc){
if (ruleFileLoc != null){
//StatelessKieSession ksession = KieSessionFactory.getKieSession(ruleFileLoc);
KieSession ksession = kContainer.newKieSession("ksession-rule");
KieRuntimeLogger logger = KieServices.Factory.get().getLoggers().newFileLogger(ksession,"C://Users/katuk/eclipse-workspace/test");
//ksession.insert("claim id");
ksession.execute(CommandFactory.newInsert(claimObj));
}else{
try{
log.write("Rules File Location is Invalid or Null\n");
log.flush();
}catch(Exception e){
e.printStackTrace();
}
}
return claimObj;
}
/*public static String ruleFileConnection(String _ruleFileLoc){
try{
String rulesPath = _ruleFileLoc;
ClassLoader loader =Thread.currentThread().getContextClassLoader();
Properties props = new Properties();
try(InputStream rulesLocStream = loader.getResourceAsStream(rulesPath)){
props.load(rulesLocStream);
}
return props.getProperty("ruleFileLoc");
} catch (FileNotFoundException ex) {
return null;
} catch (IOException ex) {
return null;
}
}*/
}
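One more thing related to the YARN problem: the class above already imports slf4j but writes everything through System.out, and in yarn mode that output only ends up in the executor containers' logs, not on the driver console. My understanding is that a static slf4j logger would at least go through Spark's log4j configuration and be easier to find with yarn logs. A sketch of what I mean (the message text is just an example; static so Spark does not try to serialize the logger):

public class RulesExecutor implements Serializable {
    // static: not serialized with the object when the executor instance is broadcast
    private static final Logger LOG = LoggerFactory.getLogger(RulesExecutor.class);

    public ClaimInfo evalRules(ClaimInfo claimObj, String ruleFileLoc) {
        if (ruleFileLoc == null) {
            LOG.warn("Rules File Location is Invalid or Null");
            return claimObj;
        }
        LOG.info("Evaluating rules for claim: {}", claimObj);
        // ... same Drools session code as above ...
        return claimObj;
    }
}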
kmodule.xml
<?xml version="1.0" encoding="UTF-8"?>
<kmodule xmlns="http://jboss.org/kie/7.1.0.Final/kmodule">
<kbase name="KBase" default="true" packages="com.rsrit.cob">
<ksession name="KSession" type="stateless" default="true" />
</kbase>
</kmodule>
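A side note on the error further below: the log says kmodule.xml fails XSD validation ("Cannot find the declaration of element 'kmodule'"). I copied the xmlns above from somewhere and I am not sure it is valid; the Drools examples I have seen use the generic namespace, so maybe it should look more like this (also, the Java code asks for a session named "ksession-rule" while this file only defines "KSession", which may be a separate problem):

<?xml version="1.0" encoding="UTF-8"?>
<kmodule xmlns="http://www.drools.org/xsd/kmodule">
    <kbase name="KBase" default="true" packages="com.rsrit.cob">
        <ksession name="KSession" type="stateless" default="true" />
    </kbase>
</kmodule>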
RecoverableClaimsMain.java
package com.rsrit.cob.application;
import com.rsrit.cob.drools.*;
import com.rsrit.cob.Variables.*;
import com.rsrit.cob.dataSource.*;
import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.List;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.broadcast.Broadcast;
import org.apache.spark.sql.SparkSession;
public class RecoverableClaimsMain {
public static void main(String[] args){
// TODO Auto-generated method stub
List<ClaimInfo> inputData = new ArrayList<ClaimInfo>();
PrimaryClaimInfo primaryObj;
// InputStreamReader bReader = new InputStreamReader(new
// FileInputStream(new File(args[0])));
BufferedReader bReader = null;
try {
bReader = new BufferedReader(new FileReader(args[0]));
String currentLine;
bReader.readLine();
while ((currentLine = bReader.readLine()) != null) {
ParseClaimsFromTextFile textfile = new ParseClaimsFromTextFile(currentLine);
//System.out.println("testing the current line "+currentLine);
/*
* After initializing all the variables, we create a substring that
* takes only the last 3 characters of the membership ID. Whether a
* membership is primary or secondary is determined by those last 3
* characters: if the membership ID ends with -01 or -02 we treat the
* member as secondary, otherwise we treat the member as primary.
*/
String subStg_MBSID = textfile.getMembership_ID().substring(textfile.getMembership_ID().length() - 3);
/*
* Now we use an if-else statement to perform the operations that are
* specific to primary members and secondary members.
*/
if (subStg_MBSID.equals("-01") || subStg_MBSID.equals("-02")) {
/*
* If the membership on the record is secondary, we call a method that
* creates a new object linking the primary member.
*/
//System.out.println(textfile.getMembership_ID());
primaryObj = textfile.primary_Member_Info(textfile.getMembership_ID(), textfile.getLine_Date_Tst(),
textfile.getClaim_id(), args[1]);
/*
* After creating the primary-member object, we create a new ClaimInfo
* object that takes the primary-member object as one of its arguments
* and add that object to our ArrayList.
*/
if (primaryObj != null) {
inputData.add(new ClaimInfo(textfile.getClaim_id(), textfile.getLineNum(),
textfile.getPrimacy_value(), textfile.getProcedure_covered(),
textfile.getProvider_Zipcode(), textfile.getPart_Type(),
textfile.getEvent_Names().split(","), textfile.getMbs_Type(),
textfile.getEmployer_Size_Aged(), textfile.getEmployer_Size_Disabled(),
textfile.getLine_Date_Tst(), textfile.getEvent_Start_Dates().split(","),
textfile.getEvent_End_Dates().split(","), textfile.currentTime(),
textfile.previousYearTime(), textfile.getPrimary_Memeber(),
textfile.getPsn_First_Name(), textfile.getPsn_Last_Name(), textfile.getCharge_amount(),
textfile.getNet_Charged_Amt(), textfile.getMembership_ID(), primaryObj, subStg_MBSID,
textfile.getProvider_Name(), textfile.getAge(), textfile.getGender()));
System.out.println("claims from secondary members: "+textfile.getMembership_ID());
} else {
System.out.println("Not Enough valid Information of the Primary Member of the Claim "+textfile.getClaim_id());
}
} else {
/*
* If the member on the record is primary, we can directly create an
* object using all the variables built from the columns of the
* result set and add that object to our ArrayList.
*/
inputData.add(new ClaimInfo(textfile.getClaim_id(), textfile.getLineNum(),
textfile.getPrimacy_value(), textfile.getProcedure_covered(),
textfile.getProvider_Zipcode(), textfile.getPart_Type(),
textfile.getEvent_Names().split(","), textfile.getMbs_Type(),
textfile.getEmployer_Size_Aged(), textfile.getEmployer_Size_Disabled(),
textfile.getLine_Date_Tst(), textfile.getEvent_Start_Dates().split(","),
textfile.getEvent_End_Dates().split(","), textfile.currentTime(),
textfile.previousYearTime(), textfile.getPrimary_Memeber(), textfile.getPsn_First_Name(),
textfile.getPsn_Last_Name(), textfile.getCharge_amount(), textfile.getNet_Charged_Amt(),
textfile.getMembership_ID(), subStg_MBSID, textfile.getProvider_Name(), textfile.getAge(),
textfile.getGender()));
}
}
} catch (FileNotFoundException ex) {
ex.printStackTrace();
} catch (IOException ex) {
ex.printStackTrace();
} catch (Exception ex) {
ex.printStackTrace();
} finally {
try {
bReader.close();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
/*
* Create a SparkSession with master set to local, since we are not
* running the application on a multi-node cluster (for now), and then
* create a new SparkContext from that SparkSession.
*/
SparkSession spark = SparkSession.builder().appName("Phase-one Test").master("local").getOrCreate();
JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());
/*
* Now create a new RulesExecutor object, which will open the KieSession.
*/
RulesExecutor rulesExecutor = new RulesExecutor();
/*
* Now broadcast the RulesExecutor to the different nodes, in case the
* application needs to run on a multi-node cluster in the future.
*/
Broadcast<RulesExecutor> broadcastRules = sc.broadcast(rulesExecutor);
JavaRDD<ClaimInfo> claims = sc.parallelize(inputData);
System.out.println(claims.count());
/*
* After creating the JavaRDD, apply the map function using the
* evalRules() method, passing the broadcast rules and the ClaimInfo
* object as input arguments. After the map we can count the number of
* recoverable claims by filtering for the objects whose recoverable
* flag is TRUE and counting them.
*/
JavaRDD<ClaimInfo> appliedClaims = claims
.map(mainclaims -> broadcastRules.value().evalRules(mainclaims, args[2]));
JavaRDD<ClaimInfo> recoveredClaims = appliedClaims.filter(mainClaims -> mainClaims.isRecoverable());
System.out.println("Number of claims recovered: " + recoveredClaims.count());
sc.close();
spark.close();
}
}
Error when running the program
Using Spark's default log4j profile: org/apache/spark/log4j-defaults.properties
18/08/20 11:46:43 INFO SparkContext: Running Spark version 2.3.0
18/08/20 11:46:43 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
18/08/20 11:46:43 INFO SparkContext: Submitted application: Phase-one Test
18/08/20 11:46:44 INFO SecurityManager: Changing view acls to: katuk
18/08/20 11:46:44 INFO SecurityManager: Changing modify acls to: katuk
18/08/20 11:46:44 INFO SecurityManager: Changing view acls groups to:
18/08/20 11:46:44 INFO SecurityManager: Changing modify acls groups to:
18/08/20 11:46:44 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(katuk); groups with view permissions: Set(); users with modify permissions: Set(katuk); groups with modify permissions: Set()
18/08/20 11:46:45 INFO Utils: Successfully started service 'sparkDriver' on port 53222.
18/08/20 11:46:45 INFO SparkEnv: Registering MapOutputTracker
18/08/20 11:46:45 INFO SparkEnv: Registering BlockManagerMaster
18/08/20 11:46:45 INFO BlockManagerMasterEndpoint: Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information
18/08/20 11:46:45 INFO BlockManagerMasterEndpoint: BlockManagerMasterEndpoint up
18/08/20 11:46:45 INFO DiskBlockManager: Created local directory at C:\Users\katuk\AppData\Local\Temp\blockmgr-11105d7d-3dc9-412b-959a-8a917e887744
18/08/20 11:46:45 INFO MemoryStore: MemoryStore started with capacity 891.0 MB
18/08/20 11:46:45 INFO SparkEnv: Registering OutputCommitCoordinator
18/08/20 11:46:45 INFO Utils: Successfully started service 'SparkUI' on port 4040.
18/08/20 11:46:45 INFO SparkUI: Bound SparkUI to 0.0.0.0, and started at http://WINDOWS-2G24VKQ:4040
18/08/20 11:46:45 INFO Executor: Starting executor ID driver on host localhost
18/08/20 11:46:45 INFO Utils: Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 53235.
18/08/20 11:46:45 INFO NettyBlockTransferService: Server created on WINDOWS-2G24VKQ:53235
18/08/20 11:46:45 INFO BlockManager: Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy
18/08/20 11:46:45 INFO BlockManagerMaster: Registering BlockManager BlockManagerId(driver, WINDOWS-2G24VKQ, 53235, None)
18/08/20 11:46:45 INFO BlockManagerMasterEndpoint: Registering block manager WINDOWS-2G24VKQ:53235 with 891.0 MB RAM, BlockManagerId(driver, WINDOWS-2G24VKQ, 53235, None)
18/08/20 11:46:45 INFO BlockManagerMaster: Registered BlockManager BlockManagerId(driver, WINDOWS-2G24VKQ, 53235, None)
18/08/20 11:46:45 INFO BlockManager: Initialized BlockManager: BlockManagerId(driver, WINDOWS-2G24VKQ, 53235, None)
18/08/20 11:46:46 INFO ClasspathKieProject: Found kmodule: file:/D:/HealthCare/Molina/ExecutableJarFileWithResources/molinaHealthcare/target/classes/META-INF/kmodule.xml
18/08/20 11:46:46 ERROR ClasspathKieProject: Unable to build index of kmodule.xml url=file:/D:/HealthCare/Molina/ExecutableJarFileWithResources/molinaHealthcare/target/classes/META-INF/kmodule.xml
XSD validation failed against the new schema (cvc-elt.1: Cannot find the declaration of element 'kmodule'.) and against the old schema (cvc-elt.1: Cannot find the declaration of element 'kmodule'.).
18/08/20 11:46:46 INFO MemoryStore: Block broadcast_0 stored as values in memory (estimated size 1416.0 B, free 891.0 MB)
Exception in thread "main" java.io.NotSerializableException: org.drools.compiler.kie.builder.impl.KieContainerImpl
Serialization stack:
- object not serializable (class: org.drools.compiler.kie.builder.impl.KieContainerImpl, value: org.drools.compiler.kie.builder.impl.KieContainerImpl@17814b1c)
- field (class: com.rsrit.cob.drools.RulesExecutor, name: kContainer, type: interface org.kie.api.runtime.KieContainer)
- object (class com.rsrit.cob.drools.RulesExecutor, com.rsrit.cob.drools.RulesExecutor@46d8f407)
at org.apache.spark.serializer.SerializationDebugger$.improveException(SerializationDebugger.scala:40)
at org.apache.spark.serializer.JavaSerializationStream.writeObject(JavaSerializer.scala:46)
at org.apache.spark.broadcast.TorrentBroadcast$$anonfun$blockifyObject$2.apply(TorrentBroadcast.scala:291)
at org.apache.spark.broadcast.TorrentBroadcast$$anonfun$blockifyObject$2.apply(TorrentBroadcast.scala:291)
at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1377)
at org.apache.spark.broadcast.TorrentBroadcast$.blockifyObject(TorrentBroadcast.scala:292)
at org.apache.spark.broadcast.TorrentBroadcast.writeBlocks(TorrentBroadcast.scala:127)
at org.apache.spark.broadcast.TorrentBroadcast.<init>(TorrentBroadcast.scala:88)
at org.apache.spark.broadcast.TorrentBroadcastFactory.newBroadcast(TorrentBroadcastFactory.scala:34)
at org.apache.spark.broadcast.BroadcastManager.newBroadcast(BroadcastManager.scala:62)
at org.apache.spark.SparkContext.broadcast(SparkContext.scala:1481)
at org.apache.spark.api.java.JavaSparkContext.broadcast(JavaSparkContext.scala:650)
at com.rsrit.cob.application.RecoverableClaimsMain.main(RecoverableClaimsMain.java:151)
18/08/20 11:46:46 INFO SparkContext: Invoking stop() from shutdown hook
18/08/20 11:46:46 INFO SparkUI: Stopped Spark web UI at http://WINDOWS-2G24VKQ:4040
18/08/20 11:46:46 INFO MapOutputTrackerMasterEndpoint: MapOutputTrackerMasterEndpoint stopped!
18/08/20 11:46:46 INFO MemoryStore: MemoryStore cleared
18/08/20 11:46:46 INFO BlockManager: BlockManager stopped
18/08/20 11:46:46 INFO BlockManagerMaster: BlockManagerMaster stopped
18/08/20 11:46:46 INFO OutputCommitCoordinator$OutputCommitCoordinatorEndpoint: OutputCommitCoordinator stopped!
18/08/20 11:46:46 INFO SparkContext: Successfully stopped SparkContext
18/08/20 11:46:46 INFO ShutdownHookManager: Shutdown hook called
18/08/20 11:46:46 INFO ShutdownHookManager: Deleting directory C:\Users\katuk\AppData\Local\Temp\spark-f12fc1c9-7cce-43e3-8a00-da2e753c49bd
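From the stack trace, the broadcast fails because the kContainer field of RulesExecutor holds a KieContainerImpl, which is not Serializable. Would the right fix be to make the container transient and create it lazily on first use, so that only the serializable part of RulesExecutor is shipped to the executors? A rough sketch of what I have in mind (null check and logger left out for brevity):

@SuppressWarnings("serial")
public class RulesExecutor implements Serializable {
    // transient: rebuilt on each executor instead of being serialized with the broadcast
    private transient KieContainer kContainer;

    private KieContainer container() {
        if (kContainer == null) {
            kContainer = KieServices.Factory.get().getKieClasspathContainer();
        }
        return kContainer;
    }

    public ClaimInfo evalRules(ClaimInfo claimObj, String ruleFileLoc) {
        KieSession ksession = container().newKieSession("ksession-rule");
        try {
            ksession.execute(CommandFactory.newInsert(claimObj));
        } finally {
            ksession.dispose();
        }
        return claimObj;
    }
}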