Files
mars-elt/airflow/TestDags/archive/devo_replicator_test.py
Grzegorz Michalski 2c225d68ac init
2026-03-02 09:47:35 +01:00

113 lines
3.4 KiB
Python

from airflow import DAG
from airflow.operators.python import PythonOperator
from airflow.providers.oracle.hooks.oracle import OracleHook
from airflow.utils.dates import days_ago
from datetime import timedelta
import logging
# Static bind parameters for the MRDS_LOADER.DATA_REPLICATOR procedure calls below.
# NOTE(review): these are hard-coded test values (this file lives under
# TestDags/archive) — presumably the real DAG derives them from params or
# Variables; confirm before reuse.
p_run_id = 1234
p_service_name = 'MyService'
p_table_owner = 'MY_SCHEMA'
p_table_name = 'MY_TABLE'
p_objectstore_uri = 's3://bucket/uri' # subject to change appropriate for RAR/MOPDB
def start_log_table_task(**context):
    """Call MRDS_LOADER.DATA_REPLICATOR.start_log_table on the default Oracle connection.

    Binds the module-level run id, service name, table owner and table name
    as positional parameters and commits on success. Any database error is
    logged with its traceback and re-raised so Airflow marks the task failed
    (and retries per default_args).
    """
    proc_call = "BEGIN MRDS_LOADER.DATA_REPLICATOR.start_log_table(:1, :2, :3, :4); END;"
    oracle_hook = OracleHook(oracle_conn_id='oracle_default')
    conn = oracle_hook.get_conn()
    try:
        cursor = conn.cursor()
        try:
            cursor.execute(proc_call, [p_run_id, p_service_name, p_table_owner, p_table_name])
            conn.commit()
        finally:
            # Original code skipped cursor.close() on error; always release it.
            cursor.close()
        logging.info("start_log_table executed successfully.")
    except Exception as e:
        logging.error("Failed to execute start_log_table: %s", e, exc_info=True)
        raise
    finally:
        # Original code leaked the connection when execute/commit raised.
        conn.close()
def export_table_task(**context):
    """Call MRDS_LOADER.DATA_REPLICATOR.export_table on the default Oracle connection.

    Binds the module-level service name, table owner, table name and object
    store URI as positional parameters and commits on success. Any database
    error is logged with its traceback and re-raised so Airflow marks the
    task failed (and retries per default_args).
    """
    proc_call = "BEGIN MRDS_LOADER.DATA_REPLICATOR.export_table(:1, :2, :3, :4); END;"
    oracle_hook = OracleHook(oracle_conn_id='oracle_default')
    conn = oracle_hook.get_conn()
    try:
        cursor = conn.cursor()
        try:
            cursor.execute(proc_call, [p_service_name, p_table_owner, p_table_name, p_objectstore_uri])
            conn.commit()
        finally:
            # Original code skipped cursor.close() on error; always release it.
            cursor.close()
        logging.info("export_table executed successfully.")
    except Exception as e:
        logging.error("Failed to execute export_table: %s", e, exc_info=True)
        raise
    finally:
        # Original code leaked the connection when execute/commit raised.
        conn.close()
def devo_impyla_task(**context):
    """Stub for the Devo (Impyla) load step.

    Intentionally a no-op for now: it only emits a log line so the DAG
    wiring can be exercised end to end. The eventual implementation is
    expected to connect to Impala, e.g.::

        from impala.dbapi import connect
        conn = connect(host="...", port=21050)
        cursor = conn.cursor()
        cursor.execute("...")
    """
    logging.info("Impyla (Devo) task placeholder ran. Please implement.")
def end_log_table_task(**context):
    """Call MRDS_LOADER.DATA_REPLICATOR.end_log_table on the default Oracle connection.

    Binds the module-level service name, table owner and table name as
    positional parameters and commits on success. Any database error is
    logged with its traceback and re-raised so Airflow marks the task
    failed (and retries per default_args).
    """
    proc_call = "BEGIN MRDS_LOADER.DATA_REPLICATOR.end_log_table(:1, :2, :3); END;"
    oracle_hook = OracleHook(oracle_conn_id='oracle_default')
    conn = oracle_hook.get_conn()
    try:
        cursor = conn.cursor()
        try:
            cursor.execute(proc_call, [p_service_name, p_table_owner, p_table_name])
            conn.commit()
        finally:
            # Original code skipped cursor.close() on error; always release it.
            cursor.close()
        logging.info("end_log_table executed successfully.")
    except Exception as e:
        logging.error("Failed to execute end_log_table: %s", e, exc_info=True)
        raise
    finally:
        # Original code leaked the connection when execute/commit raised.
        conn.close()
# Defaults applied to every task of the DAG defined below.
# NOTE(review): airflow.utils.dates.days_ago is deprecated in newer Airflow
# releases — a fixed timezone-aware datetime is the modern replacement;
# confirm the target Airflow version before modernizing.
default_args = dict(
    owner='airflow',
    depends_on_past=False,          # each run is independent of the previous one
    start_date=days_ago(1),
    email_on_failure=False,
    email_on_retry=False,
    retries=2,                      # retry a failed task twice...
    retry_delay=timedelta(minutes=5),  # ...waiting 5 minutes between attempts
)
with DAG(
    dag_id='rqsd_devo_replicator_test_old',
    default_args=default_args,
    description='Run Devo replicator workflow',
    schedule_interval=None,   # no schedule: triggered manually/externally only
    catchup=False,
    tags=['Devo', 'RQSD', 'Replicator'],
) as dag:
    # One PythonOperator per pipeline step, in execution order:
    # start_log_table -> export_table -> devo_impyla -> end_log_table
    steps = [
        ('start_log_table', start_log_table_task),
        ('export_table', export_table_task),
        ('devo_impyla', devo_impyla_task),
        ('end_log_table', end_log_table_task),
    ]
    t1, t2, t3, t4 = [
        PythonOperator(task_id=step_id, python_callable=step_fn)
        for step_id, step_fn in steps
    ]
    t1 >> t2 >> t3 >> t4