At first I wanted to use xcom_push / xcom_pull (provided by Airflow) to pass a value from the parent DAG to the SubDAG, but after many attempts I found it had no effect. Can someone help me pass a value between the parent DAG and its SubDAG?
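For context, this is how I understand xcom_push / xcom_pull are supposed to work between two tasks in the same DAG (a minimal sketch; the DAG id, task names and schedule here are placeholders, not from my real code):

from datetime import datetime
from airflow import DAG
from airflow.operators.python_operator import PythonOperator

# placeholder DAG used only for this sketch
demo_dag = DAG('xcom_demo', start_date=datetime(2018, 1, 1), schedule_interval=None)

def push_value(**kwargs):
    # push a value under an explicit key
    kwargs['ti'].xcom_push(key='daily_check', value=True)

def pull_value(**kwargs):
    # pull it back at run time in a downstream task
    print(kwargs['ti'].xcom_pull(task_ids='push_task', key='daily_check'))

push_task = PythonOperator(task_id='push_task', provide_context=True,
                           python_callable=push_value, dag=demo_dag)
pull_task = PythonOperator(task_id='pull_task', provide_context=True,
                           python_callable=pull_value, dag=demo_dag)
push_task >> pull_task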
Here is my code; I just want to pass daily_check from the parent DAG to the SubDAG.
# function used by the parent DAG
def templated_daily_check(ds, **kwargs):
    hour_2_check_daily = kwargs['hour_2_check_daily']
    zk_hosts = kwargs['zk_hosts']
    zk_path_daily = kwargs['zk_path_daily']
    check_time = kwargs['check_time']
    # invoke the custom Python function dailyCheck to read the flag from ZooKeeper
    dailyOrNot = dailyTask.dailyCheck(zk_hosts, zk_path_daily, hour_2_check_daily, check_time)
    print("=====================================================================================================>")
    print(dailyOrNot)
    # daily_check: the value that needs to be passed to the SubDAG
    kwargs['ti'].xcom_push(key='daily_check', value=dailyOrNot)
    if dailyOrNot:
        dailyTask.dailyMark(zk_hosts, zk_path_daily, check_time)
# function to create the sub DAG
def sub_dag_iaoc_or_iaocPlusaauoo(parent_dag_name, child_dag_name, all_task_name):
    subDag = DAG(
        dag_id='%s.%s' % (parent_dag_name, child_dag_name),
        default_args=default_args, schedule_interval=timedelta(hours=dag_duration_hours)
    )
    # daily_check: the value that needs to be received from the parent DAG.
    # This is how I tried to receive it, but it has no effect
    # (see the runtime-pull sketch after the listing).
    daily_check = os.popen('''echo {{ task_instance.xcom_pull(task_ids='daily_check_bf_all', key='daily_check', dag_id=%s) }}''' % (parent_dag_name)).read()
    # Based on the value received above, decide how many operators to generate
    if daily_check == str(False):
        t_r_fm_all_iaoc = BashOperator(
            task_id='r_fm_all_iaoc',
            bash_command="""{}/fm/sh/airflow_preparerun_task.sh {} {} r_fm_all_iaoc""".format(USER_HOME_FOLDER, trade_jar_rev, USER_HOME_FOLDER) + ";" +
                         """{} {} {} {} {} {} -r {} -iaoc """.format(spark_submit_fixed_conf, yarn_res_fixed_args, trade_class_fm, '{}/fm/jar/{}/fmtrade-{}.jar'.format(USER_HOME_FOLDER, 'r_fm_all_iaoc', trade_jar_rev), trade_fixed_args, trade_task_fixed_args_fm, all_task_name),
            dag=subDag
        )
    else:
        t_r_fm_all_iaoc = BashOperator(
            task_id='r_fm_all_iaoc',
            bash_command="""{}/fm/sh/airflow_preparerun_task.sh {} {} r_fm_all_iaoc""".format(USER_HOME_FOLDER, trade_jar_rev, USER_HOME_FOLDER) + ";" +
                         """{} {} {} {} {} {} -r {} -iaoc """.format(spark_submit_fixed_conf, yarn_res_fixed_args, trade_class_fm, '{}/fm/jar/{}/fmtrade-{}.jar'.format(USER_HOME_FOLDER, 'r_fm_all_iaoc', trade_jar_rev), trade_fixed_args, trade_task_fixed_args_fm, all_task_name),
            dag=subDag
        )
        t_r_fm_all_aauoo = BashOperator(
            task_id='r_fm_all_aauoo',
            bash_command="""{}/fm/sh/airflow_preparerun_task.sh {} {} r_fm_all_aauoo""".format(USER_HOME_FOLDER, trade_jar_rev, USER_HOME_FOLDER) + ";" +
                         """{} {} {} {} {} {} -r {} -aauoo """.format(spark_submit_fixed_conf, yarn_res_fixed_args, trade_class_fm, '{}/fm/jar/{}/fmtrade-{}.jar'.format(USER_HOME_FOLDER, 'r_fm_all_aauoo', trade_jar_rev), trade_fixed_args, trade_task_fixed_args_fm_all, all_task_name),
            dag=subDag
        )
    return subDag
t_daily_check_bf_all = PythonOperator(
    task_id='daily_check_bf_all',
    provide_context=True,
    python_callable=templated_daily_check,
    op_kwargs={
        'hour_2_check_daily': hour_2_check_daily,
        'zk_hosts': zk_hosts,
        'zk_path_daily': zk_path_daily,
        'check_time': time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
    },
    dag=dag
)
t_r_fm_all_subdag = SubDagOperator(
    subdag=sub_dag_iaoc_or_iaocPlusaauoo('afFMTradeDag',
                                         'iaoc_or_iaocPlusaauoo',
                                         'all'),
    task_id='iaoc_or_iaocPlusaauoo',
    dag=dag
)
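For comparison, here is a variant of the SubDAG factory I have been considering but have not verified: let a task inside the SubDAG pull the XCom itself when it runs, passing the parent dag_id explicitly because the SubDAG has its own dag_id. My understanding is that Jinja such as {{ task_instance.xcom_pull(...) }} is only rendered inside an operator's templated fields at task run time, so the os.popen/echo call above executes at DAG-parse time and can never see the pushed value. The names sub_dag_with_runtime_pull, run_with_daily_check and use_daily_check below are placeholders, not part of my real DAG:

from datetime import timedelta
from airflow import DAG
from airflow.operators.python_operator import PythonOperator

def sub_dag_with_runtime_pull(parent_dag_name, child_dag_name):
    subDag = DAG(
        dag_id='%s.%s' % (parent_dag_name, child_dag_name),
        default_args=default_args,
        schedule_interval=timedelta(hours=dag_duration_hours)
    )

    def run_with_daily_check(**kwargs):
        # pull from the parent DAG explicitly; the SubDAG has its own dag_id
        daily_check = kwargs['ti'].xcom_pull(
            dag_id=parent_dag_name,
            task_ids='daily_check_bf_all',
            key='daily_check')
        print(daily_check)
        # ...branch on daily_check here at run time, instead of deciding
        # which operators to create at parse time...

    PythonOperator(
        task_id='use_daily_check',
        provide_context=True,
        python_callable=run_with_daily_check,
        dag=subDag
    )
    return subDag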