Connecting to and Querying MySQL from Airflow

from airflow import DAG
from airflow.operators.python import PythonOperator
# from airflow.providers.mysql.operators.mysql import MySqlOperator
from airflow.operators.mysql_operator import MySqlOperator
from airflow.operators.dummy import DummyOperator
from liveramp.common.metadata import default_args
from airflow.models import Variable

from liveramp.common.notification import send_to_slack, generate_message_blocks_according_to_upstream
import uuid


def failure_callback(context):
    send_to_slack(generate_message_blocks_according_to_upstream(context))


def success_callback(context):
    send_to_slack(generate_message_blocks_according_to_upstream(context))


# uuid3 needs a namespace and is deterministic for a given name, so a
# different name would have to be supplied on every run to get unique IDs:
# value = uuid.uuid3(uuid.NAMESPACE_DNS, "AirFlow")
value = uuid.uuid4()
suid = ''.join(str(value).split("-"))
sql_ = (f'insert job(job_id, tenant_id, tenant_env, tenant_display_name, tenant_name, tenant_settings) '
        f'values("{suid}", 111, "QA", "TEST", "Test_display_name", "hdfghjdsagfdhsgf");')
sql_ += f"insert job_status(job_id, job_reference_id) values ('{suid}', 1);"
sql_ += f"insert job_status_log(job_id, job_reference_id, memo) values ('{suid}', 1, 'init insert');"
print(sql_)

dag = DAG(
    'collect_requests_dag_backup',
    default_args=default_args,
    tags=['mysql', 'MySqlOperator'],
    # start_date=datetime(2021, 1, 1),
    schedule_interval=None,
    catchup=False,
    # wire up the Slack callbacks defined above (they were otherwise unused)
    on_failure_callback=failure_callback,
    on_success_callback=success_callback,
)

start = DummyOperator(
    task_id='start',
    dag=dag)

end = DummyOperator(
    task_id='end',
    dag=dag)

sql_insert = MySqlOperator(
    task_id='sql_insert',
    mysql_conn_id='mysql_conn_id',
    sql=sql_,
    dag=dag,
)


start >> sql_insert >> end
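
Because schedule_interval=None, this DAG never runs on a schedule; it has to be triggered manually, either from the UI or via the CLI:

airflow dags trigger collect_requests_dag_backup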

The listing above goes through an already-deprecated import path,

from airflow.operators.mysql_operator import MySqlOperator

which was verified to still work in testing.


The Connection ID has to be configured on the Connections side (Admin → Connections in the Airflow web UI); once the fields are filled in, click the Test button to verify the connection.
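
The same connection can also be created from the command line instead of the UI; a minimal sketch, where the host, schema, login, password, and port are placeholders to replace with your own values:

airflow connections add 'mysql_conn_id' \
    --conn-type 'mysql' \
    --conn-host 'mysql.example.com' \
    --conn-schema 'jobs_db' \
    --conn-login 'airflow_user' \
    --conn-password 'airflow_pass' \
    --conn-port 3306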

When importing the operator, if you drop the deprecated path

from airflow.operators.mysql_operator import MySqlOperator

you can use the new provider dependency instead:
from airflow.providers.mysql.operators.mysql import MySqlOperator
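
Note that this import is only available once the MySQL provider package is installed alongside Airflow:

pip install apache-airflow-providers-mysql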


from airflow import DAG
from airflow.providers.mysql.operators.mysql import MySqlOperator
from airflow.operators.dummy import DummyOperator
from liveramp.common.metadata import default_args

from liveramp.common.notification import send_to_slack, generate_message_blocks_according_to_upstream
import uuid


def failure_callback(context):
    send_to_slack(generate_message_blocks_according_to_upstream(context))


def success_callback(context):
    send_to_slack(generate_message_blocks_according_to_upstream(context))


value = uuid.uuid4()
suid = ''.join(str(value).split("-"))
sql_ = (f'insert job(job_id, tenant_id, tenant_env, tenant_display_name, tenant_name, tenant_settings) '
        f'values("{suid}", 111, "QA", "TEST", "Test_display_name", "hdfghjdsagfdhsgf");')
sql_ += f"insert job_status(job_id, job_reference_id) values ('{suid}', 1);"
sql_ += f"insert job_status_log(job_id, job_reference_id, memo) values ('{suid}', 1, 'init insert');"
print(sql_)

dag = DAG(
    'collect_requests_dag',
    default_args=default_args,
    tags=['mysql', 'MySqlOperator'],
    # start_date=datetime(2021, 1, 1),
    schedule_interval=None,
    catchup=False,
    # wire up the Slack callbacks defined above (they were otherwise unused)
    on_failure_callback=failure_callback,
    on_success_callback=success_callback,
)

start = DummyOperator(
    task_id='start',
    dag=dag)

end = DummyOperator(
    task_id='end',
    dag=dag)

sql_insert = MySqlOperator(
    task_id='sql_insert',
    mysql_conn_id='mysql_conn_id',
    sql=sql_,
    autocommit=True,
    dag=dag,
)

start >> sql_insert >> end
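
Building the SQL string by hand works for a throwaway demo, but MySqlOperator also accepts a parameters argument that lets the MySQL driver handle the quoting. A minimal sketch of one of the inserts with bound parameters (the sql_insert_safe task is hypothetical, added here for illustration; %s is the MySQL client's placeholder style):

sql_insert_safe = MySqlOperator(
    task_id='sql_insert_safe',
    mysql_conn_id='mysql_conn_id',
    sql="insert job_status(job_id, job_reference_id) values (%s, %s);",
    parameters=(suid, 1),
    autocommit=True,
    dag=dag,
)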
