I wrote an Oozie workflow to copy a file from the local filesystem to HDFS. The job runs without reporting any errors, but the file never shows up in HDFS.
Here is my code.
job.properties
nameNode=hdfs://localhost:8020
jobTracker=localhost:8032
queueName=default
oozie.wf.application.path=${nameNode}/crazyoozie
focusNodeLogin=cloudera
shellScriptPath=/home/cloudera/Desktop/script.sh
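For reference, this is how I submit the workflow (assuming the Oozie server runs at the default localhost:11000; the URL may differ on your setup):

oozie job -oozie http://localhost:11000/oozie -config job.properties -run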
workflow.xml
<workflow-app name="WorkFlowForSshAction" xmlns="uri:oozie:workflow:0.1">
    <start to="sshAction"/>
    <action name="sshAction">
        <ssh xmlns="uri:oozie:ssh-action:0.1">
            <host>${focusNodeLogin}</host>
            <command>${shellScriptPath}</command>
            <capture-output/>
        </ssh>
        <ok to="end"/>
        <error to="killAction"/>
    </action>
    <kill name="killAction">
        <message>"Killed job due to error"</message>
    </kill>
    <end name="end"/>
</workflow-app>
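After it runs, I check the job with the Oozie CLI (the job id below is just a placeholder); neither command shows any error:

oozie job -oozie http://localhost:11000/oozie -info <job-id>
oozie job -oozie http://localhost:11000/oozie -log <job-id>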
script.sh
#!/bin/bash
# Copy the local input directory to HDFS
hadoop fs -put /home/cloudera/Desktop/oozieinput /oozieresults-sshAction
status=$?
# Emit a key=value line so the workflow's <capture-output/> can pick it up
if [ $status -eq 0 ]; then
    echo "STATUS=SUCCESS"
else
    echo "STATUS=FAIL"
fi
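Since the job succeeds but nothing lands in HDFS, I suspect the script's runtime environment. Here is a debugging variant I could swap in (the log path is an arbitrary choice of mine, and the PATH problem is only a guess):

#!/bin/bash
# Debugging sketch: redirect all output to a log file, because the stdout/stderr
# of an SSH action are easy to lose. /tmp/oozie-ssh-action.log is arbitrary.
exec >> /tmp/oozie-ssh-action.log 2>&1
echo "PATH=$PATH"                 # a non-login ssh shell may not have hadoop on PATH
which hadoop || echo "hadoop not found on PATH"
hadoop fs -put /home/cloudera/Desktop/oozieinput /oozieresults-sshAction
echo "exit status: $?"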
script.sh lives on the local filesystem; the target directory oozieresults-sshAction is on HDFS.
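To check whether the copy happened, I list the target directory on HDFS; it comes back empty:

hadoop fs -ls /oozieresults-sshAction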
Can anyone help?