I am writing an Airflow task to read a large CSV file and save it to a PostgreSQL database. I found that the asyncpg package has a copy function that runs faster than any other package I tried. However, it is asynchronous, and I don't know how to incorporate it into Airflow. Here is the sample code:
from airflow import DAG
from airflow.operators.python_operator import PythonOperator
from datetime import datetime, timedelta
import pandas as pd
import asyncpg


async def to_sql(dataframe, table_name, schema_name='public', timeout=None, truncate=False):
    connection = await asyncpg.connect(user='postgres', host='host.docker.internal',
                                       database='quantaxis', password='123456')
    result = await connection.copy_records_to_table(
        table_name,
        records=dataframe.values.tolist(),
        columns=list(dataframe.columns),  # column names must match the target table
        schema_name=schema_name,
        timeout=timeout)
    await connection.close()
    return result


default_args = {
    'owner': 'Airflow',
    'depends_on_past': False,
    'start_date': datetime(2020, 1, 1),
    'retries': 1,
    'retry_delay': timedelta(minutes=1),
}

dag = DAG('pythonexp2123', default_args=default_args, schedule_interval=timedelta(days=1))


async def save_file_to_database(ds):
    df = pd.read_csv("data{0}.csv".format(ds))
    r = await to_sql(df, 'test')
    return r


t1 = PythonOperator(
    task_id='pushing_task',
    provide_context=True,
    python_callable=save_file_to_database,
    dag=dag
)

t1
When I run it, it returns this error:

Can't Pickle Object <Coroutine>

How should I change the function to make the DAG work? I still want to use the asyncpg package because of its speed.
Answer (score: 4):
You can try using asyncio to run the async function inside an event loop. If you are on Python 3.7+, you can simply call asyncio.run(async_function()).
https://docs.python.org/3/library/asyncio-task.html
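
For illustration, here is a minimal, self-contained sketch of asyncio.run (the coroutine fetch_one and its return value are placeholders, not part of your DAG):

import asyncio

async def fetch_one():
    # Placeholder coroutine standing in for any awaitable work,
    # e.g. an asyncpg query.
    await asyncio.sleep(0)
    return "done"

# asyncio.run() creates a fresh event loop, runs the coroutine to
# completion, closes the loop, and returns the coroutine's result.
result = asyncio.run(fetch_one())
print(result)  # -> done

Applied to your DAG, it looks like this: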
from airflow import DAG
from airflow.operators.python_operator import PythonOperator
from datetime import datetime, timedelta
import pandas as pd
import asyncpg
import asyncio


async def to_sql(dataframe, table_name, schema_name='public', timeout=None, truncate=False):
    connection = await asyncpg.connect(user='postgres', host='host.docker.internal',
                                       database='quantaxis', password='123456')
    result = await connection.copy_records_to_table(
        table_name,
        records=dataframe.values.tolist(),
        columns=list(dataframe.columns),  # column names must match the target table
        schema_name=schema_name,
        timeout=timeout)
    await connection.close()
    return result


default_args = {
    'owner': 'Airflow',
    'depends_on_past': False,
    'start_date': datetime(2020, 1, 1),
    'retries': 1,
    'retry_delay': timedelta(minutes=1),
}

dag = DAG('pythonexp2123', default_args=default_args, schedule_interval=timedelta(days=1))


async def save_file_to_database(ds):
    df = pd.read_csv("data{0}.csv".format(ds))
    r = await to_sql(df, 'test')
    return r


def run_async(ds):
    # Synchronous wrapper: drive the coroutine to completion so the
    # operator gets a plain value back instead of a coroutine object.
    loop = asyncio.get_event_loop()
    result = loop.run_until_complete(save_file_to_database(ds))
    return result


t1 = PythonOperator(
    task_id='pushing_task',
    provide_context=True,
    python_callable=run_async,
    dag=dag
)

t1
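
The key change is that python_callable now points at run_async, a regular synchronous function. It drives the coroutine to completion inside the task, so the operator receives the plain return value of copy_records_to_table rather than a coroutine object, which is what Airflow failed to pickle. On Python 3.7+ you could replace the get_event_loop() / run_until_complete pair with a single asyncio.run(save_file_to_database(ds)) call.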