I'm trying to run a Spark jar via spark-submit from a shell action node in an Oozie workflow. I'm getting intermittent JA018 errors: the Oozie launcher MapReduce job runs and reports success, but the Spark jar is never actually executed. I can't find anything useful in any of the YARN or Oozie logs. Am I missing something obvious?
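For reference, run-spark-job.sh is just a thin wrapper around spark-submit, roughly like the sketch below (the jar name, main class, and master/deploy-mode settings are placeholders, not the real values).
run-spark-job.sh
#!/bin/bash
# Sketch only -- the jar, main class and Spark settings below are placeholders
HDFS_PATH="$1"

spark-submit \
  --master yarn \
  --deploy-mode cluster \
  --queue default \
  --class com.example.SparkJob \
  example-spark-job.jar "${HDFS_PATH}"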
workflow.xml
<workflow-app xmlns="uri:oozie:workflow:0.4" name="example-workflow">
    <start to="check-path" />
    <decision name="check-path">
        <switch>
            <case to="run-spark-job">
                ${fs:dirSize(hdfsPath) gt 0}
            </case>
            <default to="end" />
        </switch>
    </decision>
    <action name="run-spark-job">
        <shell xmlns="uri:oozie:shell-action:0.2">
            <job-tracker>${jobTracker}</job-tracker>
            <name-node>${nameNode}</name-node>
            <configuration>
                <property>
                    <name>mapred.job.queue.name</name>
                    <value>${queueName}</value>
                </property>
            </configuration>
            <exec>run-spark-job.sh</exec>
            <argument>${hdfsPath}</argument>
            <env-var>HADOOP_USER_NAME=${userToRunAs}</env-var>
            <file>${workflowPath}/run-spark-job.sh#run-spark-job.sh</file>
            <capture-output/>
        </shell>
        <ok to="end"/>
        <error to="fail"/>
    </action>
    <kill name="fail">
        <message>Run spark job action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
    </kill>
    <end name="end"/>
</workflow-app>
job.properties
nameNode=hdfs://{{ namenode_ip }}:8020
jobTracker={{ jobtracker_ip }}:8032
hiveIp={{ hive_ip }}
queueName=default
workflowsRoot=workflows
nameNodeIP={{ namenode_ip }}
userToRunAs=oozie
workflowPath=${nameNode}/user/${user.name}/${workflowsRoot}/example-workflow
oozie.coord.application.path=${nameNode}/user/${user.name}/${workflowsRoot}/example-workflow
workflowAppPath=${nameNode}/user/${user.name}/${workflowsRoot}/example-workflow
start={{ start }}
end={{ end }}
user.name=oozie
oozie.use.system.libpath=true
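I submit the coordinator with the standard Oozie CLI, along these lines (the Oozie server URL is a placeholder):
oozie job -oozie http://oozie-host:11000/oozie -config job.properties -run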
coordinator.xml
<coordinator-app name="example-coordinator" frequency="${coord:days(1)}" start="${start}" end="${end}" timezone="UTC"
                 xmlns="uri:oozie:coordinator:0.2">
    <action>
        <workflow>
            <app-path>${workflowAppPath}</app-path>
            <configuration>
                <property>
                    <name>jobTracker</name>
                    <value>${jobTracker}</value>
                </property>
                <property>
                    <name>nameNode</name>
                    <value>${nameNode}</value>
                </property>
                <property>
                    <name>queueName</name>
                    <value>${queueName}</value>
                </property>
                <property>
                    <name>hdfsPath</name>
                    <value>/user/hive/warehouse/test</value>
                </property>
            </configuration>
        </workflow>
    </action>
</coordinator-app>
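For what it's worth, these are the kinds of commands I've been using to hunt for the launcher and Spark logs (the Oozie URL, job ID, and application ID are placeholders), and none of them turn up anything for the Spark job itself:
# Workflow status and per-action details
oozie job -oozie http://oozie-host:11000/oozie -info <oozie-job-id>
# Oozie job log
oozie job -oozie http://oozie-host:11000/oozie -log <oozie-job-id>
# YARN container logs for the launcher application
yarn logs -applicationId <application-id>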