无法导入plugins文件夹中python脚本中定义的CustomOperator

时间:2019-04-12 13:05:58

标签: python-3.x airflow

我正在尝试在apache-airflow中编写自定义操作员和传感器。 它基本上有3个操作员和1个传感器,第一个操作员/任务将调用某些python方法并在控制台上打印一些消息。之后,将调用第二个运算符,这是一个自定义运算符,位于一个名为“ custom_operator.py”的文件的plugins文件夹内。它将在mongo db数据库中插入数据。然后调用一个使用mongo_hook的自定义传感器,该传感器将监视db ans检查db中的值。它也位于插件内的同一文件custom_operator.py中。之后,将调用一个简单的python运算符。

我已经尝试过: Can't import Airflow plugins

``` 
home/autotest/airflow/dags/custom_dag1.py


import logging

from airflow import DAG
from airflow.operators.python_operator import PythonOperator
from datetime import date_time, timedelta
from airflow.operators import InsertDb
from airflow.operators import DbSensor



log = logging.getLogger(__name__)

defaultArgs = {
    enter code here'owner': 'mohit_saumik',
    'depends_on_past': False,
    'start_date': date_time(2019,04,11,10,21,23)
    'email_on_failure': False,
    'email_on_retry': False,
    'retries': 1,
    'retry_delay': timedelta(minutes=1)
}

# creating first operator which will print on the console.
def print_operator_one():
    log.info("Operator One is executed.")
    return "Operator One is executed and returned"



# Creating third operator which will print on the console.
def print_operator_third():
    log.info("Operator three is executed")
    return "Operator two is executed and returned"


# Creating DAG
dag = DAG('custom_dag', default_args = defaultArgs, schedule_interval=timedelta(minutes=10))


# Creating task 1
operator_one_task = PythonOperator(task_id="task_1", python_callable="print_operator_one", dag=dag)


# Creating task 2
operator_two_task = InsertDb(my_operator_param="This is custom Operator", task_id="task_2", dag=dag)


# Creating Task 3
sensor_one_task = DbSensor(task_id="task_3", poke_interval=10, dag=dag, collection="demoCollection", query={"key1": "value1"})


# Creating task 4
operator_three_task = PythonOperator(task_id="task_4", python_callable="print_operator_third", dag=dag)


# Creating flow
operator_one_task >> operator_two_task >> sensor_one_task >> operator_three_task


```
    home/autotest/airflow/plugins/custom_operator.py



    import logging 

    from airflow.models import BaseOperator
    from airflow.plugins_manager import AirflowPlugin
    from airflow.utils.decorator import apply_defaults
    from airflow.contrib.hooks.mongo_hook import MongoHook
    from airflow.operators.sensors import BaseSensorOperator
    from datetime import datetime


    log = logging.getLogger(__name__)


    class InsertDb(BaseOperator):

        @apply_defaults
        def __init__(self, my_operator_param, *args, **kwargs):
            self.operator_param = my_operator_param
            super(InsertDb, self).__init__(*args, **kwargs)


        def execute(self, context):
            log.info("Inserting into the DB!")

            db_hook = MongoHook(self, conn_id="https://localhost,localhost:27017/mydb")
            db_conn = db_hook.get_conn()
            insertSuccess = db_conn.insert_one(mongo_collection="demoCollection",doc = {"key1": "value1"}, mongo_db="mydb" )

            log.info(insertSuccess)




    class DbSensor(BaseSensorOperator):

        @apply_defaults
        def __init__(self, collection, query, mongo_conn_id="mongo_default", *args, **kwargs):
            super(DbSensor,self).__init__(*args,**kwargs)



        def poke(self,context):
            db_hook = MongoHook(self, conn_id="https://localhost,localhost:27017/mydb")
            db_conn = db_hook.get_conn()
            result = db_conn.find(mongo_collection=collection, query=query, mongodb="mydb")

            if result is None:
                log.info("Data not available in DB")
                return False
            else:
                log.info("Data is available in DB")
                return True




    class DbPlugin(AirflowPlugin):
        name = "db_plugin"
        operators = [InsertDb, DbSensor]


I am not able to launch the webserver.
Getting the errors:

[2019-04-12 12:35:16,046] {models.py:377} ERROR - Failed to import: /home/autotest/airflow/dags/custom_dag1.py
Traceback (most recent call last):
  File "/home/autotest/virtualenv/airflow/lib/python3.6/site-packages/airflow/models.py", line 374, in process_file
    m = imp.load_source(mod_name, filepath)
  File "/home/autotest/virtualenv/airflow/lib/python3.6/imp.py", line 172, in load_source
    module = _load(spec)
  File "<frozen importlib._bootstrap>", line 684, in _load
  File "<frozen importlib._bootstrap>", line 665, in _load_unlocked
  File "<frozen importlib._bootstrap_external>", line 678, in exec_module
  File "<frozen importlib._bootstrap>", line 219, in _call_with_frames_removed
  File "/home/autotest/airflow/dags/custom_dag1.py", line 41, in <module>
    operator_one_task = PythonOperator(task_id="task_1",python_callable="print_operator_one", dag=dag)
  File "/home/autotest/virtualenv/airflow/lib/python3.6/site-packages/airflow/utils/decorators.py", line 98, in wrapper
    result = func(*args, **kwargs)
  File "/home/autotest/virtualenv/airflow/lib/python3.6/site-packages/airflow/operators/python_operator.py", line 81, in __init__
    raise AirflowException('`python_callable` param must be callable')
airflow.exceptions.AirflowException: `python_callable` param must be callable

1 个答案:

答案 0 :(得分:0)

请不要使用引号:python_callable=print_operator_third。这样,您传递的是callable而不是string