This commit is contained in:
Grzegorz Michalski
2026-03-02 09:47:35 +01:00
commit 2c225d68ac
715 changed files with 130067 additions and 0 deletions

View File

@@ -0,0 +1,146 @@
import os
from airflow.decorators import dag
from airflow.operators.bash import BashOperator
from airflow.operators.python import PythonOperator
from airflow.utils.dates import days_ago
from airflow.utils.trigger_rule import TriggerRule
from cosmos import DbtTaskGroup, ProfileConfig, ProjectConfig, RenderConfig
# DAG id is derived from the file name, so template copies only need renaming.
DAG_NAME = os.path.splitext(os.path.basename(__file__))[0]
# Deployment environment selector, e.g. "dev" or "test".
ENV_NAME = os.getenv("MRDS_ENV", "").lower()
# Environment -> target Oracle database/service name.
DATABASE_NAME_MAP = {
    "dev": "MOPDB",
    "test": "MOPDB_TEST",
}
# Unknown/unset environments fall back to the dev database.
DATABASE_NAME = DATABASE_NAME_MAP.get(ENV_NAME, "MOPDB")
# check cron 2
# Locations of the dbt project inside the worker image.
dbt_root_path = "/opt/dbt"
dbt_profiles_dir = "/opt/dbt/profiles.yml"
dbt_profiles_dir_parent = "/opt/dbt"
# Environment passed to every dbt invocation (BashOperator and cosmos tasks).
dbt_env = {
    "DBT_PROFILES_DIR": dbt_profiles_dir_parent,
    "DBT_TARGET": ENV_NAME,
    # Oracle connection settings supplied by the deployment environment.
    "MRDS_LOADER_DB_USER": os.getenv("MRDS_LOADER_DB_USER"),
    "MRDS_LOADER_DB_PASS": os.getenv("MRDS_LOADER_DB_PASS"),
    "MRDS_LOADER_DB_TNS": os.getenv("MRDS_LOADER_DB_TNS", "XE"),
    "MRDS_SCHEMA": os.getenv("MRDS_SCHEMA", "CT_MRDS"),
    "MRDS_PROTOCOL": os.getenv("MRDS_PROTOCOL", "tcps"),
    "MRDS_THREADS": os.getenv("MRDS_THREADS", "4"),
    "DBT_LOG_PATH": "/opt/dbt/logs",
    "DBT_TARGET_PATH": "/opt/dbt/target",
    "PYTHONUNBUFFERED": "1",  # stream dbt output into task logs immediately
}
def retrieve_run_id(**kwargs):
    """Publish this DAG run's run_id to XCom (key "run_id") and return it."""
    current_run_id = kwargs["run_id"]
    task_instance = kwargs["ti"]
    task_instance.xcom_push(key="run_id", value=current_run_id)
    return current_run_id
def check_dag_status(**kwargs):
    """Raise if any other task instance in this DAG run ended in "failed"."""
    own_task_id = kwargs["task_instance"].task_id
    failed_ids = [
        ti.task_id
        for ti in kwargs["dag_run"].get_task_instances()
        if ti.state == "failed" and ti.task_id != own_task_id
    ]
    if failed_ids:
        raise Exception(f"Task {failed_ids[0]} failed. Failing this DAG run")
@dag(
    dag_id=DAG_NAME,
    schedule_interval=None,  # externally triggered only, never on a schedule
    start_date=days_ago(2),  # NOTE(review): days_ago is deprecated in newer Airflow
    catchup=False,
)
def run_dag():
    """Run the m_MOPDB_CSDB_DEBT_OU_CSDB_DEBT dbt model group.

    Task chain: read_vars >> retrieve_run_id >> control_external_run_start
    >> dbt task group >> control_external_run_end >> dag_status
    """
    def read_vars(**context):
        # Debug-only: logs bucket configuration; the return value is unused.
        BUCKET = os.getenv("INBOX_BUCKET")
        BUCKET_NAMESPACE = os.getenv("BUCKET_NAMESPACE")
        print("========= DBT ENV =========")
        print(f"BUCKET_NAMESPACE: {BUCKET_NAMESPACE}, BUCKET : {BUCKET}")
        return 1

    read_vars_task = PythonOperator(
        task_id="read_vars",
        python_callable=read_vars,
        provide_context=True,  # NOTE(review): no-op/deprecated on Airflow 2.x
    )

    # Stores this run's run_id in XCom (key "run_id") for the dbt calls below.
    retrieve_run_id_task = PythonOperator(
        task_id="retrieve_run_id",
        python_callable=retrieve_run_id,
        provide_context=True,
    )

    # Registers the start of the run via a dbt run-operation. bash_command is
    # an Airflow Jinja template: {{ {...} | tojson }} renders the --vars JSON
    # payload at runtime.
    control_external_run_start = BashOperator(
        task_id="control_external_run_start",
        params={"db": DATABASE_NAME, "wf": DAG_NAME},
        env=dbt_env,
        bash_command="""
        set -euxo pipefail
        cd /opt/dbt
        dbt --log-format json --log-level debug --debug --log-path /opt/dbt/logs \
            run-operation control_external_run_start \
            --vars '{{ {
                "orchestration_run_id": ti.xcom_pull(task_ids="retrieve_run_id", key="run_id"),
                "input_service_name": params.db,
                "workflow_name": params.wf
            } | tojson }}'
        """,
    )

    # Shared cosmos configuration for the dbt task group.
    common_profile = ProfileConfig(
        profiles_yml_filepath=dbt_profiles_dir,
        profile_name="mrds",
        target_name=ENV_NAME,
    )
    common_project = ProjectConfig(dbt_project_path=dbt_root_path)
    # The xcom_pull template is rendered per task when cosmos executes dbt.
    common_vars = {
        "orchestration_run_id": "{{ ti.xcom_pull(task_ids='retrieve_run_id', key='run_id') }}",
        "input_service_name": DATABASE_NAME,
        "workflow_name": DAG_NAME,
    }
    common_operator_args = {
        "vars": common_vars,
        "env": dbt_env,
    }

    # Builds one Airflow task per dbt model carrying this group's tag.
    m_MOPDB_CSDB_DEBT_OU_CSDB_DEBT = DbtTaskGroup(
        group_id="m_MOPDB_CSDB_DEBT_OU_CSDB_DEBT",
        project_config=common_project,
        profile_config=common_profile,
        render_config=RenderConfig(select=[
            "tag:m_MOPDB_CSDB_DEBT_OU_CSDB_DEBT",
        ]),
        operator_args=common_operator_args,
    )

    # Closes the run in the control tables; ALL_DONE so it runs on failure too.
    control_external_run_end = BashOperator(
        task_id="control_external_run_end",
        params={"db": DATABASE_NAME, "wf": DAG_NAME},
        env=dbt_env,
        bash_command="""
        set -euxo pipefail
        cd /opt/dbt
        dbt --log-format json --log-level debug --debug --log-path /opt/dbt/logs \
            run-operation control_external_run_end \
            --vars '{{ {
                "orchestration_run_id": ti.xcom_pull(task_ids="retrieve_run_id", key="run_id"),
                "input_service_name": params.db,
                "workflow_name": params.wf
            } | tojson }}'
        """,
        trigger_rule=TriggerRule.ALL_DONE,
    )

    # Final gate: raises if any earlier task failed, since the ALL_DONE tasks
    # above would otherwise let the DAG run end green.
    dag_status = PythonOperator(
        task_id="dag_status",
        provide_context=True,
        python_callable=check_dag_status,
        trigger_rule=TriggerRule.ALL_DONE,
    )

    read_vars_task >> retrieve_run_id_task >> control_external_run_start >> m_MOPDB_CSDB_DEBT_OU_CSDB_DEBT >> control_external_run_end >> dag_status


# Register the DAG under the file-derived name so Airflow's DAG bag finds it.
globals()[DAG_NAME] = run_dag()

View File

@@ -0,0 +1,156 @@
import os
from airflow.decorators import dag
from airflow.operators.bash import BashOperator
from airflow.operators.python import PythonOperator
from airflow.utils.dates import days_ago
from airflow.utils.trigger_rule import TriggerRule
from cosmos import DbtTaskGroup, ProfileConfig, ProjectConfig, RenderConfig
# DAG id is derived from the file name, so template copies only need renaming.
DAG_NAME = os.path.splitext(os.path.basename(__file__))[0]
# Deployment environment selector, e.g. "dev" or "test".
ENV_NAME = os.getenv("MRDS_ENV", "").lower()
# Environment -> target Oracle database/service name.
DATABASE_NAME_MAP = {
    "dev": "MOPDB",
    "test": "MOPDB_TEST",
}
# Unknown/unset environments fall back to the dev database.
DATABASE_NAME = DATABASE_NAME_MAP.get(ENV_NAME, "MOPDB")
# check cron 2
# Locations of the dbt project inside the worker image.
dbt_root_path = "/opt/dbt"
dbt_profiles_dir = "/opt/dbt/profiles.yml"
dbt_profiles_dir_parent = "/opt/dbt"
# Environment passed to every dbt invocation (BashOperator and cosmos tasks).
dbt_env = {
    "DBT_PROFILES_DIR": dbt_profiles_dir_parent,
    "DBT_TARGET": ENV_NAME,
    # Oracle connection settings supplied by the deployment environment.
    "MRDS_LOADER_DB_USER": os.getenv("MRDS_LOADER_DB_USER"),
    "MRDS_LOADER_DB_PASS": os.getenv("MRDS_LOADER_DB_PASS"),
    "MRDS_LOADER_DB_TNS": os.getenv("MRDS_LOADER_DB_TNS", "XE"),
    "MRDS_SCHEMA": os.getenv("MRDS_SCHEMA", "CT_MRDS"),
    "MRDS_PROTOCOL": os.getenv("MRDS_PROTOCOL", "tcps"),
    "MRDS_THREADS": os.getenv("MRDS_THREADS", "4"),
    "DBT_LOG_PATH": "/opt/dbt/logs",
    "DBT_TARGET_PATH": "/opt/dbt/target",
    "PYTHONUNBUFFERED": "1",  # stream dbt output into task logs immediately
}
def retrieve_run_id(**kwargs):
    """Publish this DAG run's run_id to XCom (key "run_id") and return it."""
    current_run_id = kwargs["run_id"]
    task_instance = kwargs["ti"]
    task_instance.xcom_push(key="run_id", value=current_run_id)
    return current_run_id
def check_dag_status(**kwargs):
    """Raise if any other task instance in this DAG run ended in "failed"."""
    own_task_id = kwargs["task_instance"].task_id
    failed_ids = [
        ti.task_id
        for ti in kwargs["dag_run"].get_task_instances()
        if ti.state == "failed" and ti.task_id != own_task_id
    ]
    if failed_ids:
        raise Exception(f"Task {failed_ids[0]} failed. Failing this DAG run")
@dag(
    dag_id=DAG_NAME,
    schedule_interval=None,  # externally triggered only, never on a schedule
    start_date=days_ago(2),  # NOTE(review): days_ago is deprecated in newer Airflow
    catchup=False,
)
def run_dag():
    """Run the m_MOPDB_LM_T_FC_ADH_ADJUSTMENTS_OU_LM_ADHOC_ADJUSTMENT dbt model group.

    Task chain: read_vars >> retrieve_run_id >> control_external_run_start
    >> dbt task group >> control_external_run_end >> dag_status
    """
    def read_vars(**context):
        # Debug-only: logs bucket configuration; the return value is unused.
        BUCKET = os.getenv("INBOX_BUCKET")
        BUCKET_NAMESPACE = os.getenv("BUCKET_NAMESPACE")
        print("========= DBT ENV =========")
        print(f"BUCKET_NAMESPACE: {BUCKET_NAMESPACE}, BUCKET : {BUCKET}")
        return 1

    read_vars_task = PythonOperator(
        task_id="read_vars",
        python_callable=read_vars,
        provide_context=True,  # NOTE(review): no-op/deprecated on Airflow 2.x
    )

    # Stores this run's run_id in XCom (key "run_id") for the dbt calls below.
    retrieve_run_id_task = PythonOperator(
        task_id="retrieve_run_id",
        python_callable=retrieve_run_id,
        provide_context=True,
    )

    # Registers the start of the run via a dbt run-operation. bash_command is
    # an Airflow Jinja template: {{ {...} | tojson }} renders the --vars JSON
    # payload at runtime.
    control_external_run_start = BashOperator(
        task_id="control_external_run_start",
        params={"db": DATABASE_NAME, "wf": DAG_NAME},
        env=dbt_env,
        bash_command="""
        set -euxo pipefail
        cd /opt/dbt
        dbt --log-format json --log-level debug --debug --log-path /opt/dbt/logs \
            run-operation control_external_run_start \
            --vars '{{ {
                "orchestration_run_id": ti.xcom_pull(task_ids="retrieve_run_id", key="run_id"),
                "input_service_name": params.db,
                "workflow_name": params.wf
            } | tojson }}'
        """,
    )

    # Shared cosmos configuration for the dbt task group.
    common_profile = ProfileConfig(
        profiles_yml_filepath=dbt_profiles_dir,
        profile_name="mrds",
        target_name=ENV_NAME,
    )
    common_project = ProjectConfig(dbt_project_path=dbt_root_path)
    # The xcom_pull template is rendered per task when cosmos executes dbt.
    common_vars = {
        "orchestration_run_id": "{{ ti.xcom_pull(task_ids='retrieve_run_id', key='run_id') }}",
        "input_service_name": DATABASE_NAME,
        "workflow_name": DAG_NAME,
    }
    common_operator_args = {
        "vars": common_vars,
        "env": dbt_env,
    }

    # Builds one Airflow task per dbt model carrying this group's tag.
    m_MOPDB_LM_T_FC_ADH_ADJUSTMENTS_OU_LM_ADHOC_ADJUSTMENT = DbtTaskGroup(
        group_id="m_MOPDB_LM_T_FC_ADH_ADJUSTMENTS_OU_LM_ADHOC_ADJUSTMENT",
        project_config=common_project,
        profile_config=common_profile,
        render_config=RenderConfig(select=[
            "tag:m_MOPDB_LM_T_FC_ADH_ADJUSTMENTS_OU_LM_ADHOC_ADJUSTMENT",
        ]),
        operator_args=common_operator_args,
    )

    # Closes the run in the control tables; ALL_DONE so it runs on failure too.
    control_external_run_end = BashOperator(
        task_id="control_external_run_end",
        params={"db": DATABASE_NAME, "wf": DAG_NAME},
        env=dbt_env,
        bash_command="""
        set -euxo pipefail
        cd /opt/dbt
        dbt --log-format json --log-level debug --debug --log-path /opt/dbt/logs \
            run-operation control_external_run_end \
            --vars '{{ {
                "orchestration_run_id": ti.xcom_pull(task_ids="retrieve_run_id", key="run_id"),
                "input_service_name": params.db,
                "workflow_name": params.wf
            } | tojson }}'
        """,
        trigger_rule=TriggerRule.ALL_DONE,
    )

    # Final gate: raises if any earlier task failed, since the ALL_DONE tasks
    # above would otherwise let the DAG run end green.
    dag_status = PythonOperator(
        task_id="dag_status",
        provide_context=True,
        python_callable=check_dag_status,
        trigger_rule=TriggerRule.ALL_DONE,
    )

    read_vars_task >> retrieve_run_id_task >> control_external_run_start >> m_MOPDB_LM_T_FC_ADH_ADJUSTMENTS_OU_LM_ADHOC_ADJUSTMENT >> control_external_run_end >> dag_status


# Register the DAG under the file-derived name so Airflow's DAG bag finds it.
globals()[DAG_NAME] = run_dag()

View File

@@ -0,0 +1,152 @@
import os
from airflow.decorators import dag
from airflow.operators.bash import BashOperator
from airflow.operators.python import PythonOperator
from airflow.utils.dates import days_ago
from airflow.utils.trigger_rule import TriggerRule
from cosmos import DbtTaskGroup, ProfileConfig, ProjectConfig, RenderConfig
# DAG id is derived from the file name, so template copies only need renaming.
DAG_NAME = os.path.splitext(os.path.basename(__file__))[0]
# Deployment environment selector, e.g. "dev" or "test".
ENV_NAME = os.getenv("MRDS_ENV", "").lower()
# Environment -> target Oracle database/service name.
DATABASE_NAME_MAP = {
    "dev": "MOPDB",
    "test": "MOPDB_TEST",
}
# Unknown/unset environments fall back to the dev database.
DATABASE_NAME = DATABASE_NAME_MAP.get(ENV_NAME, "MOPDB")
# check cron 2
# Locations of the dbt project inside the worker image.
dbt_root_path = "/opt/dbt"
dbt_profiles_dir = "/opt/dbt/profiles.yml"
dbt_profiles_dir_parent = "/opt/dbt"
# Environment passed to every dbt invocation (BashOperator and cosmos tasks).
dbt_env = {
    "DBT_PROFILES_DIR": dbt_profiles_dir_parent,
    "DBT_TARGET": ENV_NAME,
    # Oracle connection settings supplied by the deployment environment.
    "MRDS_LOADER_DB_USER": os.getenv("MRDS_LOADER_DB_USER"),
    "MRDS_LOADER_DB_PASS": os.getenv("MRDS_LOADER_DB_PASS"),
    "MRDS_LOADER_DB_TNS": os.getenv("MRDS_LOADER_DB_TNS", "XE"),
    "MRDS_SCHEMA": os.getenv("MRDS_SCHEMA", "CT_MRDS"),
    "MRDS_PROTOCOL": os.getenv("MRDS_PROTOCOL", "tcps"),
    "MRDS_THREADS": os.getenv("MRDS_THREADS", "4"),
    "DBT_LOG_PATH": "/opt/dbt/logs",
    "DBT_TARGET_PATH": "/opt/dbt/target",
    "PYTHONUNBUFFERED": "1",  # stream dbt output into task logs immediately
}
def retrieve_run_id(**kwargs):
    """Publish this DAG run's run_id to XCom (key "run_id") and return it."""
    current_run_id = kwargs["run_id"]
    task_instance = kwargs["ti"]
    task_instance.xcom_push(key="run_id", value=current_run_id)
    return current_run_id
def check_dag_status(**kwargs):
    """Raise if any other task instance in this DAG run ended in "failed"."""
    own_task_id = kwargs["task_instance"].task_id
    failed_ids = [
        ti.task_id
        for ti in kwargs["dag_run"].get_task_instances()
        if ti.state == "failed" and ti.task_id != own_task_id
    ]
    if failed_ids:
        raise Exception(f"Task {failed_ids[0]} failed. Failing this DAG run")
@dag(
    dag_id=DAG_NAME,
    schedule_interval=None,  # externally triggered only, never on a schedule
    start_date=days_ago(2),  # NOTE(review): days_ago is deprecated in newer Airflow
    catchup=False,
)
def run_dag():
    """Run the m_MOPDB_LM_T_BALANCE_SHEET_OU_LM_BALANCE_SHEET dbt model group.

    Task chain: read_vars >> retrieve_run_id >> control_external_run_start
    >> dbt task group >> control_external_run_end >> dag_status
    """
    def read_vars(**context):
        # Debug-only: logs bucket configuration; the return value is unused.
        BUCKET = os.getenv("INBOX_BUCKET")
        BUCKET_NAMESPACE = os.getenv("BUCKET_NAMESPACE")
        print("========= DBT ENV =========")
        print(f"BUCKET_NAMESPACE: {BUCKET_NAMESPACE}, BUCKET : {BUCKET}")
        return 1

    read_vars_task = PythonOperator(
        task_id="read_vars",
        python_callable=read_vars,
        provide_context=True,  # NOTE(review): no-op/deprecated on Airflow 2.x
    )

    # Stores this run's run_id in XCom (key "run_id") for the dbt calls below.
    retrieve_run_id_task = PythonOperator(
        task_id="retrieve_run_id",
        python_callable=retrieve_run_id,
        provide_context=True,
    )

    # Registers the start of the run via a dbt run-operation. bash_command is
    # an Airflow Jinja template: {{ {...} | tojson }} renders the --vars JSON
    # payload at runtime.
    control_external_run_start = BashOperator(
        task_id="control_external_run_start",
        params={"db": DATABASE_NAME, "wf": DAG_NAME},
        env=dbt_env,
        bash_command="""
        set -euxo pipefail
        cd /opt/dbt
        dbt --log-format json --log-level debug --debug --log-path /opt/dbt/logs \
            run-operation control_external_run_start \
            --vars '{{ {
                "orchestration_run_id": ti.xcom_pull(task_ids="retrieve_run_id", key="run_id"),
                "input_service_name": params.db,
                "workflow_name": params.wf
            } | tojson }}'
        """,
    )

    # Shared cosmos configuration for the dbt task group.
    common_profile = ProfileConfig(
        profiles_yml_filepath=dbt_profiles_dir,
        profile_name="mrds",
        target_name=ENV_NAME,
    )
    common_project = ProjectConfig(dbt_project_path=dbt_root_path)
    # The xcom_pull template is rendered per task when cosmos executes dbt.
    common_vars = {
        "orchestration_run_id": "{{ ti.xcom_pull(task_ids='retrieve_run_id', key='run_id') }}",
        "input_service_name": DATABASE_NAME,
        "workflow_name": DAG_NAME,
    }
    common_operator_args = {
        "vars": common_vars,
        "env": dbt_env,
    }

    # Builds one Airflow task per dbt model carrying this group's tag.
    m_MOPDB_LM_T_BALANCE_SHEET_OU_LM_BALANCE_SHEET = DbtTaskGroup(
        group_id="m_MOPDB_LM_T_BALANCE_SHEET_OU_LM_BALANCE_SHEET",
        project_config=common_project,
        profile_config=common_profile,
        render_config=RenderConfig(select=[
            "tag:m_MOPDB_LM_T_BALANCE_SHEET_OU_LM_BALANCE_SHEET",
        ]),
        operator_args=common_operator_args,
    )

    # Closes the run in the control tables; ALL_DONE so it runs on failure too.
    control_external_run_end = BashOperator(
        task_id="control_external_run_end",
        params={"db": DATABASE_NAME, "wf": DAG_NAME},
        env=dbt_env,
        bash_command="""
        set -euxo pipefail
        cd /opt/dbt
        dbt --log-format json --log-level debug --debug --log-path /opt/dbt/logs \
            run-operation control_external_run_end \
            --vars '{{ {
                "orchestration_run_id": ti.xcom_pull(task_ids="retrieve_run_id", key="run_id"),
                "input_service_name": params.db,
                "workflow_name": params.wf
            } | tojson }}'
        """,
        trigger_rule=TriggerRule.ALL_DONE,
    )

    # Final gate: raises if any earlier task failed, since the ALL_DONE tasks
    # above would otherwise let the DAG run end green.
    dag_status = PythonOperator(
        task_id="dag_status",
        provide_context=True,
        python_callable=check_dag_status,
        trigger_rule=TriggerRule.ALL_DONE,
    )

    read_vars_task >> retrieve_run_id_task >> control_external_run_start >> m_MOPDB_LM_T_BALANCE_SHEET_OU_LM_BALANCE_SHEET >> control_external_run_end >> dag_status


# Register the DAG under the file-derived name so Airflow's DAG bag finds it.
globals()[DAG_NAME] = run_dag()

View File

@@ -0,0 +1,147 @@
import os
from airflow.decorators import dag
from airflow.operators.bash import BashOperator
from airflow.operators.python import PythonOperator
from airflow.utils.dates import days_ago
from airflow.utils.trigger_rule import TriggerRule
from cosmos import DbtTaskGroup, ProfileConfig, ProjectConfig, RenderConfig
# DAG id is derived from the file name, so template copies only need renaming.
DAG_NAME = os.path.splitext(os.path.basename(__file__))[0]
# Deployment environment selector, e.g. "dev" or "test".
ENV_NAME = os.getenv("MRDS_ENV", "").lower()
# Environment -> target Oracle database/service name.
DATABASE_NAME_MAP = {
    "dev": "MOPDB",
    "test": "MOPDB_TEST",
}
# Unknown/unset environments fall back to the dev database.
DATABASE_NAME = DATABASE_NAME_MAP.get(ENV_NAME, "MOPDB")
# check cron 2
# Locations of the dbt project inside the worker image.
dbt_root_path = "/opt/dbt"
dbt_profiles_dir = "/opt/dbt/profiles.yml"
dbt_profiles_dir_parent = "/opt/dbt"
# Environment passed to every dbt invocation (BashOperator and cosmos tasks).
dbt_env = {
    "DBT_PROFILES_DIR": dbt_profiles_dir_parent,
    "DBT_TARGET": ENV_NAME,
    # Oracle connection settings supplied by the deployment environment.
    "MRDS_LOADER_DB_USER": os.getenv("MRDS_LOADER_DB_USER"),
    "MRDS_LOADER_DB_PASS": os.getenv("MRDS_LOADER_DB_PASS"),
    "MRDS_LOADER_DB_TNS": os.getenv("MRDS_LOADER_DB_TNS", "XE"),
    "MRDS_SCHEMA": os.getenv("MRDS_SCHEMA", "CT_MRDS"),
    "MRDS_PROTOCOL": os.getenv("MRDS_PROTOCOL", "tcps"),
    "MRDS_THREADS": os.getenv("MRDS_THREADS", "4"),
    "DBT_LOG_PATH": "/opt/dbt/logs",
    "DBT_TARGET_PATH": "/opt/dbt/target",
    "PYTHONUNBUFFERED": "1",  # stream dbt output into task logs immediately
}
def retrieve_run_id(**kwargs):
    """Publish this DAG run's run_id to XCom (key "run_id") and return it."""
    current_run_id = kwargs["run_id"]
    task_instance = kwargs["ti"]
    task_instance.xcom_push(key="run_id", value=current_run_id)
    return current_run_id
def check_dag_status(**kwargs):
    """Raise if any other task instance in this DAG run ended in "failed"."""
    own_task_id = kwargs["task_instance"].task_id
    failed_ids = [
        ti.task_id
        for ti in kwargs["dag_run"].get_task_instances()
        if ti.state == "failed" and ti.task_id != own_task_id
    ]
    if failed_ids:
        raise Exception(f"Task {failed_ids[0]} failed. Failing this DAG run")
@dag(
    dag_id=DAG_NAME,
    schedule_interval=None,  # externally triggered only, never on a schedule
    start_date=days_ago(2),  # NOTE(review): days_ago is deprecated in newer Airflow
    catchup=False,
)
def run_dag():
    """Run the m_MOPDB_LM_T_FC_CSM_ADJUSTMENTS_OU_LM_CSM_ADJUSTMENT dbt model group.

    Task chain: read_vars >> retrieve_run_id >> control_external_run_start
    >> dbt task group >> control_external_run_end >> dag_status
    """
    def read_vars(**context):
        # Debug-only: logs bucket configuration; the return value is unused.
        BUCKET = os.getenv("INBOX_BUCKET")
        BUCKET_NAMESPACE = os.getenv("BUCKET_NAMESPACE")
        print("========= DBT ENV =========")
        print(f"BUCKET_NAMESPACE: {BUCKET_NAMESPACE}, BUCKET : {BUCKET}")
        return 1

    read_vars_task = PythonOperator(
        task_id="read_vars",
        python_callable=read_vars,
        provide_context=True,  # NOTE(review): no-op/deprecated on Airflow 2.x
    )

    # Stores this run's run_id in XCom (key "run_id") for the dbt calls below.
    retrieve_run_id_task = PythonOperator(
        task_id="retrieve_run_id",
        python_callable=retrieve_run_id,
        provide_context=True,
    )

    # Registers the start of the run via a dbt run-operation. bash_command is
    # an Airflow Jinja template: {{ {...} | tojson }} renders the --vars JSON
    # payload at runtime.
    control_external_run_start = BashOperator(
        task_id="control_external_run_start",
        params={"db": DATABASE_NAME, "wf": DAG_NAME},
        env=dbt_env,
        bash_command="""
        set -euxo pipefail
        cd /opt/dbt
        dbt --log-format json --log-level debug --debug --log-path /opt/dbt/logs \
            run-operation control_external_run_start \
            --vars '{{ {
                "orchestration_run_id": ti.xcom_pull(task_ids="retrieve_run_id", key="run_id"),
                "input_service_name": params.db,
                "workflow_name": params.wf
            } | tojson }}'
        """,
    )

    # Shared cosmos configuration for the dbt task group.
    common_profile = ProfileConfig(
        profiles_yml_filepath=dbt_profiles_dir,
        profile_name="mrds",
        target_name=ENV_NAME,
    )
    common_project = ProjectConfig(dbt_project_path=dbt_root_path)
    # The xcom_pull template is rendered per task when cosmos executes dbt.
    common_vars = {
        "orchestration_run_id": "{{ ti.xcom_pull(task_ids='retrieve_run_id', key='run_id') }}",
        "input_service_name": DATABASE_NAME,
        "workflow_name": DAG_NAME,
    }
    common_operator_args = {
        "vars": common_vars,
        "env": dbt_env,
    }

    # Builds one Airflow task per dbt model carrying this group's tag.
    m_MOPDB_LM_T_FC_CSM_ADJUSTMENTS_OU_LM_CSM_ADJUSTMENT = DbtTaskGroup(
        group_id="m_MOPDB_LM_T_FC_CSM_ADJUSTMENTS_OU_LM_CSM_ADJUSTMENT",
        project_config=common_project,
        profile_config=common_profile,
        render_config=RenderConfig(select=[
            "tag:m_MOPDB_LM_T_FC_CSM_ADJUSTMENTS_OU_LM_CSM_ADJUSTMENT",
        ]),
        operator_args=common_operator_args,
    )

    # Closes the run in the control tables; ALL_DONE so it runs on failure too.
    control_external_run_end = BashOperator(
        task_id="control_external_run_end",
        params={"db": DATABASE_NAME, "wf": DAG_NAME},
        env=dbt_env,
        bash_command="""
        set -euxo pipefail
        cd /opt/dbt
        dbt --log-format json --log-level debug --debug --log-path /opt/dbt/logs \
            run-operation control_external_run_end \
            --vars '{{ {
                "orchestration_run_id": ti.xcom_pull(task_ids="retrieve_run_id", key="run_id"),
                "input_service_name": params.db,
                "workflow_name": params.wf
            } | tojson }}'
        """,
        trigger_rule=TriggerRule.ALL_DONE,
    )

    # Final gate: raises if any earlier task failed, since the ALL_DONE tasks
    # above would otherwise let the DAG run end green.
    dag_status = PythonOperator(
        task_id="dag_status",
        provide_context=True,
        python_callable=check_dag_status,
        trigger_rule=TriggerRule.ALL_DONE,
    )

    read_vars_task >> retrieve_run_id_task >> control_external_run_start >> m_MOPDB_LM_T_FC_CSM_ADJUSTMENTS_OU_LM_CSM_ADJUSTMENT >> control_external_run_end >> dag_status


# Register the DAG under the file-derived name so Airflow's DAG bag finds it.
globals()[DAG_NAME] = run_dag()

View File

@@ -0,0 +1,157 @@
import os
from airflow.decorators import dag
from airflow.operators.bash import BashOperator
from airflow.operators.python import PythonOperator
from airflow.utils.dates import days_ago
from airflow.utils.trigger_rule import TriggerRule
from cosmos import DbtTaskGroup, ProfileConfig, ProjectConfig, RenderConfig
# DAG id is derived from the file name, so template copies only need renaming.
DAG_NAME = os.path.splitext(os.path.basename(__file__))[0]
# Deployment environment selector, e.g. "dev" or "test".
ENV_NAME = os.getenv("MRDS_ENV", "").lower()
# Environment -> target Oracle database/service name.
DATABASE_NAME_MAP = {
    "dev": "MOPDB",
    "test": "MOPDB_TEST",
}
# Unknown/unset environments fall back to the dev database.
DATABASE_NAME = DATABASE_NAME_MAP.get(ENV_NAME, "MOPDB")
# check cron 2
# Locations of the dbt project inside the worker image.
dbt_root_path = "/opt/dbt"
dbt_profiles_dir = "/opt/dbt/profiles.yml"
dbt_profiles_dir_parent = "/opt/dbt"
# Environment passed to every dbt invocation (BashOperator and cosmos tasks).
dbt_env = {
    "DBT_PROFILES_DIR": dbt_profiles_dir_parent,
    "DBT_TARGET": ENV_NAME,
    # Oracle connection settings supplied by the deployment environment.
    "MRDS_LOADER_DB_USER": os.getenv("MRDS_LOADER_DB_USER"),
    "MRDS_LOADER_DB_PASS": os.getenv("MRDS_LOADER_DB_PASS"),
    "MRDS_LOADER_DB_TNS": os.getenv("MRDS_LOADER_DB_TNS", "XE"),
    "MRDS_SCHEMA": os.getenv("MRDS_SCHEMA", "CT_MRDS"),
    "MRDS_PROTOCOL": os.getenv("MRDS_PROTOCOL", "tcps"),
    "MRDS_THREADS": os.getenv("MRDS_THREADS", "4"),
    "DBT_LOG_PATH": "/opt/dbt/logs",
    "DBT_TARGET_PATH": "/opt/dbt/target",
    "PYTHONUNBUFFERED": "1",  # stream dbt output into task logs immediately
}
def retrieve_run_id(**kwargs):
    """Publish this DAG run's run_id to XCom (key "run_id") and return it."""
    current_run_id = kwargs["run_id"]
    task_instance = kwargs["ti"]
    task_instance.xcom_push(key="run_id", value=current_run_id)
    return current_run_id
def check_dag_status(**kwargs):
    """Raise if any other task instance in this DAG run ended in "failed"."""
    own_task_id = kwargs["task_instance"].task_id
    failed_ids = [
        ti.task_id
        for ti in kwargs["dag_run"].get_task_instances()
        if ti.state == "failed" and ti.task_id != own_task_id
    ]
    if failed_ids:
        raise Exception(f"Task {failed_ids[0]} failed. Failing this DAG run")
@dag(
    dag_id=DAG_NAME,
    schedule_interval=None,  # externally triggered only, never on a schedule
    start_date=days_ago(2),  # NOTE(review): days_ago is deprecated in newer Airflow
    catchup=False,
)
def run_dag():
    """Run the m_MOPDB_LM_T_CURRENT_ACCOUNTS_OU_MRR_CURRENT_ACCOUNTS dbt model group.

    Task chain: read_vars >> retrieve_run_id >> control_external_run_start
    >> dbt task group >> control_external_run_end >> dag_status
    """
    def read_vars(**context):
        # Debug-only: logs bucket configuration; the return value is unused.
        BUCKET = os.getenv("INBOX_BUCKET")
        BUCKET_NAMESPACE = os.getenv("BUCKET_NAMESPACE")
        print("========= DBT ENV =========")
        print(f"BUCKET_NAMESPACE: {BUCKET_NAMESPACE}, BUCKET : {BUCKET}")
        return 1

    read_vars_task = PythonOperator(
        task_id="read_vars",
        python_callable=read_vars,
        provide_context=True,  # NOTE(review): no-op/deprecated on Airflow 2.x
    )

    # Stores this run's run_id in XCom (key "run_id") for the dbt calls below.
    retrieve_run_id_task = PythonOperator(
        task_id="retrieve_run_id",
        python_callable=retrieve_run_id,
        provide_context=True,
    )

    # Registers the start of the run via a dbt run-operation. bash_command is
    # an Airflow Jinja template: {{ {...} | tojson }} renders the --vars JSON
    # payload at runtime.
    control_external_run_start = BashOperator(
        task_id="control_external_run_start",
        params={"db": DATABASE_NAME, "wf": DAG_NAME},
        env=dbt_env,
        bash_command="""
        set -euxo pipefail
        cd /opt/dbt
        dbt --log-format json --log-level debug --debug --log-path /opt/dbt/logs \
            run-operation control_external_run_start \
            --vars '{{ {
                "orchestration_run_id": ti.xcom_pull(task_ids="retrieve_run_id", key="run_id"),
                "input_service_name": params.db,
                "workflow_name": params.wf
            } | tojson }}'
        """,
    )

    # Shared cosmos configuration for the dbt task group.
    common_profile = ProfileConfig(
        profiles_yml_filepath=dbt_profiles_dir,
        profile_name="mrds",
        target_name=ENV_NAME,
    )
    common_project = ProjectConfig(dbt_project_path=dbt_root_path)
    # The xcom_pull template is rendered per task when cosmos executes dbt.
    common_vars = {
        "orchestration_run_id": "{{ ti.xcom_pull(task_ids='retrieve_run_id', key='run_id') }}",
        "input_service_name": DATABASE_NAME,
        "workflow_name": DAG_NAME,
    }
    common_operator_args = {
        "vars": common_vars,
        "env": dbt_env,
    }

    # Builds one Airflow task per dbt model carrying this group's tag.
    m_MOPDB_LM_T_CURRENT_ACCOUNTS_OU_MRR_CURRENT_ACCOUNTS = DbtTaskGroup(
        group_id="m_MOPDB_LM_T_CURRENT_ACCOUNTS_OU_MRR_CURRENT_ACCOUNTS",
        project_config=common_project,
        profile_config=common_profile,
        render_config=RenderConfig(select=[
            "tag:m_MOPDB_LM_T_CURRENT_ACCOUNTS_OU_MRR_CURRENT_ACCOUNTS",
        ]),
        operator_args=common_operator_args,
    )

    # Closes the run in the control tables; ALL_DONE so it runs on failure too.
    control_external_run_end = BashOperator(
        task_id="control_external_run_end",
        params={"db": DATABASE_NAME, "wf": DAG_NAME},
        env=dbt_env,
        bash_command="""
        set -euxo pipefail
        cd /opt/dbt
        dbt --log-format json --log-level debug --debug --log-path /opt/dbt/logs \
            run-operation control_external_run_end \
            --vars '{{ {
                "orchestration_run_id": ti.xcom_pull(task_ids="retrieve_run_id", key="run_id"),
                "input_service_name": params.db,
                "workflow_name": params.wf
            } | tojson }}'
        """,
        trigger_rule=TriggerRule.ALL_DONE,
    )

    # Final gate: raises if any earlier task failed, since the ALL_DONE tasks
    # above would otherwise let the DAG run end green.
    dag_status = PythonOperator(
        task_id="dag_status",
        provide_context=True,
        python_callable=check_dag_status,
        trigger_rule=TriggerRule.ALL_DONE,
    )

    read_vars_task >> retrieve_run_id_task >> control_external_run_start >> m_MOPDB_LM_T_CURRENT_ACCOUNTS_OU_MRR_CURRENT_ACCOUNTS >> control_external_run_end >> dag_status


# Register the DAG under the file-derived name so Airflow's DAG bag finds it.
globals()[DAG_NAME] = run_dag()

View File

@@ -0,0 +1,149 @@
import os
from airflow.decorators import dag
from airflow.operators.bash import BashOperator
from airflow.operators.python import PythonOperator
from airflow.utils.dates import days_ago
from airflow.utils.trigger_rule import TriggerRule
from cosmos import DbtTaskGroup, ProfileConfig, ProjectConfig, RenderConfig
# DAG id is derived from the file name, so template copies only need renaming.
DAG_NAME = os.path.splitext(os.path.basename(__file__))[0]
# Deployment environment selector, e.g. "dev" or "test".
ENV_NAME = os.getenv("MRDS_ENV", "").lower()
# Environment -> target Oracle database/service name.
DATABASE_NAME_MAP = {
    "dev": "MOPDB",
    "test": "MOPDB_TEST",
}
# Unknown/unset environments fall back to the dev database.
DATABASE_NAME = DATABASE_NAME_MAP.get(ENV_NAME, "MOPDB")
# check cron 2
# Locations of the dbt project inside the worker image.
dbt_root_path = "/opt/dbt"
dbt_profiles_dir = "/opt/dbt/profiles.yml"
dbt_profiles_dir_parent = "/opt/dbt"
# Environment passed to every dbt invocation (BashOperator and cosmos tasks).
dbt_env = {
    "DBT_PROFILES_DIR": dbt_profiles_dir_parent,
    "DBT_TARGET": ENV_NAME,
    # Oracle connection settings supplied by the deployment environment.
    "MRDS_LOADER_DB_USER": os.getenv("MRDS_LOADER_DB_USER"),
    "MRDS_LOADER_DB_PASS": os.getenv("MRDS_LOADER_DB_PASS"),
    "MRDS_LOADER_DB_TNS": os.getenv("MRDS_LOADER_DB_TNS", "XE"),
    "MRDS_SCHEMA": os.getenv("MRDS_SCHEMA", "CT_MRDS"),
    "MRDS_PROTOCOL": os.getenv("MRDS_PROTOCOL", "tcps"),
    "MRDS_THREADS": os.getenv("MRDS_THREADS", "4"),
    "DBT_LOG_PATH": "/opt/dbt/logs",
    "DBT_TARGET_PATH": "/opt/dbt/target",
    "PYTHONUNBUFFERED": "1",  # stream dbt output into task logs immediately
}
def retrieve_run_id(**kwargs):
    """Publish this DAG run's run_id to XCom (key "run_id") and return it."""
    current_run_id = kwargs["run_id"]
    task_instance = kwargs["ti"]
    task_instance.xcom_push(key="run_id", value=current_run_id)
    return current_run_id
def check_dag_status(**kwargs):
    """Raise if any other task instance in this DAG run ended in "failed"."""
    own_task_id = kwargs["task_instance"].task_id
    failed_ids = [
        ti.task_id
        for ti in kwargs["dag_run"].get_task_instances()
        if ti.state == "failed" and ti.task_id != own_task_id
    ]
    if failed_ids:
        raise Exception(f"Task {failed_ids[0]} failed. Failing this DAG run")
@dag(
    dag_id=DAG_NAME,
    schedule_interval=None,  # externally triggered only, never on a schedule
    start_date=days_ago(2),  # NOTE(review): days_ago is deprecated in newer Airflow
    catchup=False,
)
def run_dag():
    """Run the m_MOPDB_LM_T_FORECAST_OU_LM_FORECAST dbt model group.

    Task chain: read_vars >> retrieve_run_id >> control_external_run_start
    >> dbt task group >> control_external_run_end >> dag_status
    """
    def read_vars(**context):
        # Debug-only: logs bucket configuration; the return value is unused.
        BUCKET = os.getenv("INBOX_BUCKET")
        BUCKET_NAMESPACE = os.getenv("BUCKET_NAMESPACE")
        print("========= DBT ENV =========")
        print(f"BUCKET_NAMESPACE: {BUCKET_NAMESPACE}, BUCKET : {BUCKET}")
        return 1

    read_vars_task = PythonOperator(
        task_id="read_vars",
        python_callable=read_vars,
        provide_context=True,  # NOTE(review): no-op/deprecated on Airflow 2.x
    )

    # Stores this run's run_id in XCom (key "run_id") for the dbt calls below.
    retrieve_run_id_task = PythonOperator(
        task_id="retrieve_run_id",
        python_callable=retrieve_run_id,
        provide_context=True,
    )

    # Registers the start of the run via a dbt run-operation. bash_command is
    # an Airflow Jinja template: {{ {...} | tojson }} renders the --vars JSON
    # payload at runtime.
    control_external_run_start = BashOperator(
        task_id="control_external_run_start",
        params={"db": DATABASE_NAME, "wf": DAG_NAME},
        env=dbt_env,
        bash_command="""
        set -euxo pipefail
        cd /opt/dbt
        dbt --log-format json --log-level debug --debug --log-path /opt/dbt/logs \
            run-operation control_external_run_start \
            --vars '{{ {
                "orchestration_run_id": ti.xcom_pull(task_ids="retrieve_run_id", key="run_id"),
                "input_service_name": params.db,
                "workflow_name": params.wf
            } | tojson }}'
        """,
    )

    # Shared cosmos configuration for the dbt task group.
    common_profile = ProfileConfig(
        profiles_yml_filepath=dbt_profiles_dir,
        profile_name="mrds",
        target_name=ENV_NAME,
    )
    common_project = ProjectConfig(dbt_project_path=dbt_root_path)
    # The xcom_pull template is rendered per task when cosmos executes dbt.
    common_vars = {
        "orchestration_run_id": "{{ ti.xcom_pull(task_ids='retrieve_run_id', key='run_id') }}",
        "input_service_name": DATABASE_NAME,
        "workflow_name": DAG_NAME,
    }
    common_operator_args = {
        "vars": common_vars,
        "env": dbt_env,
    }

    # Builds one Airflow task per dbt model carrying this group's tag.
    m_MOPDB_LM_T_FORECAST_OU_LM_FORECAST = DbtTaskGroup(
        group_id="m_MOPDB_LM_T_FORECAST_OU_LM_FORECAST",
        project_config=common_project,
        profile_config=common_profile,
        render_config=RenderConfig(select=[
            "tag:m_MOPDB_LM_T_FORECAST_OU_LM_FORECAST",
        ]),
        operator_args=common_operator_args,
    )

    # Closes the run in the control tables; ALL_DONE so it runs on failure too.
    control_external_run_end = BashOperator(
        task_id="control_external_run_end",
        params={"db": DATABASE_NAME, "wf": DAG_NAME},
        env=dbt_env,
        bash_command="""
        set -euxo pipefail
        cd /opt/dbt
        dbt --log-format json --log-level debug --debug --log-path /opt/dbt/logs \
            run-operation control_external_run_end \
            --vars '{{ {
                "orchestration_run_id": ti.xcom_pull(task_ids="retrieve_run_id", key="run_id"),
                "input_service_name": params.db,
                "workflow_name": params.wf
            } | tojson }}'
        """,
        trigger_rule=TriggerRule.ALL_DONE,
    )

    # Final gate: raises if any earlier task failed, since the ALL_DONE tasks
    # above would otherwise let the DAG run end green.
    dag_status = PythonOperator(
        task_id="dag_status",
        provide_context=True,
        python_callable=check_dag_status,
        trigger_rule=TriggerRule.ALL_DONE,
    )

    read_vars_task >> retrieve_run_id_task >> control_external_run_start >> m_MOPDB_LM_T_FORECAST_OU_LM_FORECAST >> control_external_run_end >> dag_status


# Register the DAG under the file-derived name so Airflow's DAG bag finds it.
globals()[DAG_NAME] = run_dag()

View File

@@ -0,0 +1,155 @@
import os
from airflow.decorators import dag
from airflow.operators.bash import BashOperator
from airflow.operators.python import PythonOperator
from airflow.utils.dates import days_ago
from airflow.utils.trigger_rule import TriggerRule
from cosmos import DbtTaskGroup, ProfileConfig, ProjectConfig, RenderConfig
# DAG id is derived from the file name, so template copies only need renaming.
DAG_NAME = os.path.splitext(os.path.basename(__file__))[0]
# Deployment environment selector, e.g. "dev" or "test".
ENV_NAME = os.getenv("MRDS_ENV", "").lower()
# Environment -> target Oracle database/service name.
DATABASE_NAME_MAP = {
    "dev": "MOPDB",
    "test": "MOPDB_TEST",
}
# Unknown/unset environments fall back to the dev database.
DATABASE_NAME = DATABASE_NAME_MAP.get(ENV_NAME, "MOPDB")
# check cron 2
# Locations of the dbt project inside the worker image.
dbt_root_path = "/opt/dbt"
dbt_profiles_dir = "/opt/dbt/profiles.yml"
dbt_profiles_dir_parent = "/opt/dbt"
# Environment passed to every dbt invocation (BashOperator and cosmos tasks).
dbt_env = {
    "DBT_PROFILES_DIR": dbt_profiles_dir_parent,
    "DBT_TARGET": ENV_NAME,
    # Oracle connection settings supplied by the deployment environment.
    "MRDS_LOADER_DB_USER": os.getenv("MRDS_LOADER_DB_USER"),
    "MRDS_LOADER_DB_PASS": os.getenv("MRDS_LOADER_DB_PASS"),
    "MRDS_LOADER_DB_TNS": os.getenv("MRDS_LOADER_DB_TNS", "XE"),
    "MRDS_SCHEMA": os.getenv("MRDS_SCHEMA", "CT_MRDS"),
    "MRDS_PROTOCOL": os.getenv("MRDS_PROTOCOL", "tcps"),
    "MRDS_THREADS": os.getenv("MRDS_THREADS", "4"),
    "DBT_LOG_PATH": "/opt/dbt/logs",
    "DBT_TARGET_PATH": "/opt/dbt/target",
    "PYTHONUNBUFFERED": "1",  # stream dbt output into task logs immediately
}
def retrieve_run_id(**kwargs):
    """Publish this DAG run's run_id to XCom (key "run_id") and return it."""
    current_run_id = kwargs["run_id"]
    task_instance = kwargs["ti"]
    task_instance.xcom_push(key="run_id", value=current_run_id)
    return current_run_id
def check_dag_status(**kwargs):
    """Raise if any other task instance in this DAG run ended in "failed"."""
    own_task_id = kwargs["task_instance"].task_id
    failed_ids = [
        ti.task_id
        for ti in kwargs["dag_run"].get_task_instances()
        if ti.state == "failed" and ti.task_id != own_task_id
    ]
    if failed_ids:
        raise Exception(f"Task {failed_ids[0]} failed. Failing this DAG run")
@dag(
    dag_id=DAG_NAME,
    schedule_interval=None,  # externally triggered only, never on a schedule
    start_date=days_ago(2),  # NOTE(review): days_ago is deprecated in newer Airflow
    catchup=False,
)
def run_dag():
    """Run the m_MOPDB_LM_T_FC_QRE_ADJUSTMENTS_OU_LM_QRE_ADJUSTMENT dbt model group.

    Task chain: read_vars >> retrieve_run_id >> control_external_run_start
    >> dbt task group >> control_external_run_end >> dag_status
    """
    def read_vars(**context):
        # Debug-only: logs bucket configuration; the return value is unused.
        BUCKET = os.getenv("INBOX_BUCKET")
        BUCKET_NAMESPACE = os.getenv("BUCKET_NAMESPACE")
        print("========= DBT ENV =========")
        print(f"BUCKET_NAMESPACE: {BUCKET_NAMESPACE}, BUCKET : {BUCKET}")
        return 1

    read_vars_task = PythonOperator(
        task_id="read_vars",
        python_callable=read_vars,
        provide_context=True,  # NOTE(review): no-op/deprecated on Airflow 2.x
    )

    # Stores this run's run_id in XCom (key "run_id") for the dbt calls below.
    retrieve_run_id_task = PythonOperator(
        task_id="retrieve_run_id",
        python_callable=retrieve_run_id,
        provide_context=True,
    )

    # Registers the start of the run via a dbt run-operation. bash_command is
    # an Airflow Jinja template: {{ {...} | tojson }} renders the --vars JSON
    # payload at runtime.
    control_external_run_start = BashOperator(
        task_id="control_external_run_start",
        params={"db": DATABASE_NAME, "wf": DAG_NAME},
        env=dbt_env,
        bash_command="""
        set -euxo pipefail
        cd /opt/dbt
        dbt --log-format json --log-level debug --debug --log-path /opt/dbt/logs \
            run-operation control_external_run_start \
            --vars '{{ {
                "orchestration_run_id": ti.xcom_pull(task_ids="retrieve_run_id", key="run_id"),
                "input_service_name": params.db,
                "workflow_name": params.wf
            } | tojson }}'
        """,
    )

    # Shared cosmos configuration for the dbt task group.
    common_profile = ProfileConfig(
        profiles_yml_filepath=dbt_profiles_dir,
        profile_name="mrds",
        target_name=ENV_NAME,
    )
    common_project = ProjectConfig(dbt_project_path=dbt_root_path)
    # The xcom_pull template is rendered per task when cosmos executes dbt.
    common_vars = {
        "orchestration_run_id": "{{ ti.xcom_pull(task_ids='retrieve_run_id', key='run_id') }}",
        "input_service_name": DATABASE_NAME,
        "workflow_name": DAG_NAME,
    }
    common_operator_args = {
        "vars": common_vars,
        "env": dbt_env,
    }

    # Builds one Airflow task per dbt model carrying this group's tag.
    m_MOPDB_LM_T_FC_QRE_ADJUSTMENTS_OU_LM_QRE_ADJUSTMENT = DbtTaskGroup(
        group_id="m_MOPDB_LM_T_FC_QRE_ADJUSTMENTS_OU_LM_QRE_ADJUSTMENT",
        project_config=common_project,
        profile_config=common_profile,
        render_config=RenderConfig(select=[
            "tag:m_MOPDB_LM_T_FC_QRE_ADJUSTMENTS_OU_LM_QRE_ADJUSTMENT",
        ]),
        operator_args=common_operator_args,
    )

    # Closes the run in the control tables; ALL_DONE so it runs on failure too.
    control_external_run_end = BashOperator(
        task_id="control_external_run_end",
        params={"db": DATABASE_NAME, "wf": DAG_NAME},
        env=dbt_env,
        bash_command="""
        set -euxo pipefail
        cd /opt/dbt
        dbt --log-format json --log-level debug --debug --log-path /opt/dbt/logs \
            run-operation control_external_run_end \
            --vars '{{ {
                "orchestration_run_id": ti.xcom_pull(task_ids="retrieve_run_id", key="run_id"),
                "input_service_name": params.db,
                "workflow_name": params.wf
            } | tojson }}'
        """,
        trigger_rule=TriggerRule.ALL_DONE,
    )

    # Final gate: raises if any earlier task failed, since the ALL_DONE tasks
    # above would otherwise let the DAG run end green.
    dag_status = PythonOperator(
        task_id="dag_status",
        provide_context=True,
        python_callable=check_dag_status,
        trigger_rule=TriggerRule.ALL_DONE,
    )

    read_vars_task >> retrieve_run_id_task >> control_external_run_start >> m_MOPDB_LM_T_FC_QRE_ADJUSTMENTS_OU_LM_QRE_ADJUSTMENT >> control_external_run_end >> dag_status


# Register the DAG under the file-derived name so Airflow's DAG bag finds it.
globals()[DAG_NAME] = run_dag()

View File

@@ -0,0 +1,175 @@
import os
from airflow.decorators import dag
from airflow.operators.bash import BashOperator
from airflow.operators.python import PythonOperator
from airflow.utils.dates import days_ago
from airflow.utils.trigger_rule import TriggerRule
from cosmos import DbtTaskGroup, ProfileConfig, ProjectConfig, RenderConfig
# DAG id is derived from the file name, so template copies only need renaming.
DAG_NAME = os.path.splitext(os.path.basename(__file__))[0]
# Deployment environment selector, e.g. "dev" or "test".
ENV_NAME = os.getenv("MRDS_ENV", "").lower()
# Environment -> target Oracle database/service name.
DATABASE_NAME_MAP = {
    "dev": "MOPDB",
    "test": "MOPDB_TEST",
}
# Unknown/unset environments fall back to the dev database.
DATABASE_NAME = DATABASE_NAME_MAP.get(ENV_NAME, "MOPDB")
# check cron 2
# Locations of the dbt project inside the worker image.
dbt_root_path = "/opt/dbt"
dbt_profiles_dir = "/opt/dbt/profiles.yml"
dbt_profiles_dir_parent = "/opt/dbt"
# Environment passed to every dbt invocation (BashOperator and cosmos tasks).
dbt_env = {
    "DBT_PROFILES_DIR": dbt_profiles_dir_parent,
    "DBT_TARGET": ENV_NAME,
    # Oracle connection settings supplied by the deployment environment.
    "MRDS_LOADER_DB_USER": os.getenv("MRDS_LOADER_DB_USER"),
    "MRDS_LOADER_DB_PASS": os.getenv("MRDS_LOADER_DB_PASS"),
    "MRDS_LOADER_DB_TNS": os.getenv("MRDS_LOADER_DB_TNS", "XE"),
    "MRDS_SCHEMA": os.getenv("MRDS_SCHEMA", "CT_MRDS"),
    "MRDS_PROTOCOL": os.getenv("MRDS_PROTOCOL", "tcps"),
    "MRDS_THREADS": os.getenv("MRDS_THREADS", "4"),
    "DBT_LOG_PATH": "/opt/dbt/logs",
    "DBT_TARGET_PATH": "/opt/dbt/target",
    "PYTHONUNBUFFERED": "1",  # stream dbt output into task logs immediately
}
def retrieve_run_id(**kwargs):
    """Publish this DAG run's run_id to XCom (key "run_id") and return it."""
    current_run_id = kwargs["run_id"]
    task_instance = kwargs["ti"]
    task_instance.xcom_push(key="run_id", value=current_run_id)
    return current_run_id
def check_dag_status(**kwargs):
    """Raise if any other task instance in this DAG run ended in "failed"."""
    own_task_id = kwargs["task_instance"].task_id
    failed_ids = [
        ti.task_id
        for ti in kwargs["dag_run"].get_task_instances()
        if ti.state == "failed" and ti.task_id != own_task_id
    ]
    if failed_ids:
        raise Exception(f"Task {failed_ids[0]} failed. Failing this DAG run")
@dag(
dag_id=DAG_NAME,
schedule_interval=None,
start_date=days_ago(2),
catchup=False,
)
def run_dag():
def read_vars(**context):
BUCKET = os.getenv("INBOX_BUCKET")
BUCKET_NAMESPACE = os.getenv("BUCKET_NAMESPACE")
print("========= DBT ENV =========")
print(f"BUCKET_NAMESPACE: {BUCKET_NAMESPACE}, BUCKET : {BUCKET}")
return 1
read_vars_task = PythonOperator(
task_id="read_vars",
python_callable=read_vars,
provide_context=True,
)
retrieve_run_id_task = PythonOperator(
task_id="retrieve_run_id",
python_callable=retrieve_run_id,
provide_context=True,
)
control_external_run_start = BashOperator(
task_id="control_external_run_start",
params={"db": DATABASE_NAME, "wf": DAG_NAME},
env=dbt_env,
bash_command="""
set -euxo pipefail
cd /opt/dbt
dbt --log-format json --log-level debug --debug --log-path /opt/dbt/logs \
run-operation control_external_run_start \
--vars '{{ {
"orchestration_run_id": ti.xcom_pull(task_ids="retrieve_run_id", key="run_id"),
"input_service_name": params.db,
"workflow_name": params.wf
} | tojson }}'
""",
)
common_profile = ProfileConfig(
profiles_yml_filepath=dbt_profiles_dir,
profile_name="mrds",
target_name=ENV_NAME,
)
common_project = ProjectConfig(dbt_project_path=dbt_root_path)
common_vars = {
"orchestration_run_id": "{{ ti.xcom_pull(task_ids='retrieve_run_id', key='run_id') }}",
"input_service_name": DATABASE_NAME,
"workflow_name": DAG_NAME,
}
common_operator_args = {
"vars": common_vars,
"env": dbt_env,
}
m_MOPDB_LM_T_STANDING_FACILITIES_OU_LM_STANDING_FACILITIES = DbtTaskGroup(
group_id="m_MOPDB_LM_T_STANDING_FACILITIES_OU_LM_STANDING_FACILITIES",
project_config=common_project,
profile_config=common_profile,
render_config=RenderConfig(
select=["tag:m_MOPDB_LM_T_STANDING_FACILITIES_OU_LM_STANDING_FACILITIES"],
exclude=["m_MOPDB_CSDB_DEBT_DAILY_OU_CSDB_DEBT_DAILY_SQ"],
),
operator_args=common_operator_args,
)
control_external_run_end = BashOperator(
task_id="control_external_run_end",
params={"db": DATABASE_NAME, "wf": DAG_NAME},
env=dbt_env,
bash_command="""
set -euxo pipefail
cd /opt/dbt
dbt --log-format json --log-level debug --debug --log-path /opt/dbt/logs \
run-operation control_external_run_end \
--vars '{{ {
"orchestration_run_id": ti.xcom_pull(task_ids="retrieve_run_id", key="run_id"),
"input_service_name": params.db,
"workflow_name": params.wf
} | tojson }}'
""",
trigger_rule=TriggerRule.ALL_DONE,
)
def trigger_devo_replicator(**context):
    """Trigger the devo_replicator_trigger_mopdb DAG for the table loaded here.

    Passes the replicated table ("LM.T_STANDING_FACILITIES") via the DAG run
    conf. execution_date=None lets Airflow assign "now" to the triggered run.

    Returns:
        str: the run_id of the triggered DAG run.

    Raises:
        Whatever trigger_dag raises (e.g. DagNotFound) — propagated unchanged.
    """
    from airflow.api.common.trigger_dag import trigger_dag

    target_dag_id = 'devo_replicator_trigger_mopdb'
    trigger_conf = {
        "owner_table": "LM.T_STANDING_FACILITIES"
    }
    # The previous try/except only re-raised the exception unchanged (and left
    # the bound name unused), so it has been removed: trigger_dag failures
    # propagate and fail the task exactly as before.
    dag_run = trigger_dag(
        dag_id=target_dag_id,
        conf=trigger_conf,
        execution_date=None,
        replace_microseconds=False
    )
    return dag_run.run_id
trigger_replicator = PythonOperator(
    task_id='trigger_devo_replicator',
    python_callable=trigger_devo_replicator,
    # ALL_DONE: replication is attempted even if the dbt load failed upstream.
    trigger_rule=TriggerRule.ALL_DONE,
)
# Final status gate: fails the DAG run if any earlier task failed (needed
# because the ALL_DONE tasks above would otherwise mask upstream failures).
dag_status = PythonOperator(
    task_id="dag_status",
    provide_context=True,
    python_callable=check_dag_status,
    trigger_rule=TriggerRule.ALL_DONE,
)
read_vars_task >> retrieve_run_id_task >> control_external_run_start >> m_MOPDB_LM_T_STANDING_FACILITIES_OU_LM_STANDING_FACILITIES >> control_external_run_end >> trigger_replicator >> dag_status
# Expose the DAG object at module level so Airflow's DagBag discovers it.
globals()[DAG_NAME] = run_dag()

View File

View File

View File

@@ -0,0 +1,316 @@
import os
######
from airflow.decorators import dag
from airflow.operators.bash import BashOperator
from airflow.operators.python import PythonOperator
from airflow.utils.dates import days_ago
from airflow.utils.trigger_rule import TriggerRule
from airflow.api.common.trigger_dag import trigger_dag
from cosmos import DbtTaskGroup, ProfileConfig, ProjectConfig, RenderConfig
from mrds.utils.security_utils import get_verified_run_id, verify_run_id
from mrds.utils import oraconn
# DAG id is derived from this file's name, so the filename is the single
# source of truth for the DAG id.
DAG_NAME = os.path.splitext(os.path.basename(__file__))[0]
# Deployment environment ("dev", "test", ...) selected via MRDS_ENV.
ENV_NAME = os.getenv("MRDS_ENV", "").lower()
# Maps environment -> logical input database name passed to the dbt control macros.
DATABASE_NAME_MAP = {
    "dev": "MOPDB",
    "test": "MOPDB_TEST",
}
# Unknown/unset environments fall back to the dev database name.
DATABASE_NAME = DATABASE_NAME_MAP.get(ENV_NAME, "MOPDB")
# check cron 2
# Locations of the dbt project and profiles inside the worker image.
dbt_root_path = "/opt/dbt"
dbt_profiles_dir = "/opt/dbt/profiles.yml"
dbt_profiles_dir_parent = "/opt/dbt"
# Environment passed to every dbt invocation (BashOperator and cosmos groups).
# Credentials come from the scheduler environment; defaults are dev-friendly.
dbt_env = {
    "DBT_PROFILES_DIR": dbt_profiles_dir_parent,
    "DBT_TARGET": ENV_NAME,
    "MRDS_LOADER_DB_USER": os.getenv("MRDS_LOADER_DB_USER"),
    "MRDS_LOADER_DB_PASS": os.getenv("MRDS_LOADER_DB_PASS"),
    "MRDS_LOADER_DB_TNS": os.getenv("MRDS_LOADER_DB_TNS", "XE"),
    "MRDS_SCHEMA": os.getenv("MRDS_SCHEMA", "CT_MRDS"),
    "MRDS_PROTOCOL": os.getenv("MRDS_PROTOCOL", "tcps"),
    "MRDS_THREADS": os.getenv("MRDS_THREADS", "4"),
    "DBT_LOG_PATH": "/opt/dbt/logs",
    "DBT_TARGET_PATH": "/opt/dbt/target",
    "PYTHONUNBUFFERED": "1",
}
def retrieve_run_id(**kwargs):
    """Resolve the verified orchestration run id and publish it via XCom.

    The id is security-verified by get_verified_run_id, pushed under key
    "run_id" for downstream dbt invocations, and returned.
    """
    verified_id = get_verified_run_id(kwargs)
    kwargs["ti"].xcom_push(key="run_id", value=verified_id)
    return verified_id
def check_dag_status(**kwargs):
    """Fail this DAG run if any other task instance ended up in "failed"."""
    own_task_id = kwargs["task_instance"].task_id
    failed_ids = [
        ti.task_id
        for ti in kwargs["dag_run"].get_task_instances()
        if ti.state == "failed" and ti.task_id != own_task_id
    ]
    if failed_ids:
        raise Exception(f"Task {failed_ids[0]} failed. Failing this DAG run")
def get_rqsd_tables_to_replicate(**kwargs):
    """Get list of RQSD tables from a_devo_replica_mgmt_rqsd.

    Excludes tables whose name ends with the literal suffix "_COPY".
    Pushes the resulting ["OWNER.TABLE_NAME", ...] list to XCom under key
    "rqsd_tables" and also returns it.

    Raises:
        Exception: re-raised after logging when connection/query fails.
    """
    import logging
    oracle_conn = None
    try:
        oracle_conn = oraconn.connect('MRDS_LOADER')
        cursor = oracle_conn.cursor()
        try:
            # BUGFIX: in SQL LIKE, "_" is a single-character wildcard, so
            # '%_COPY' also excluded names like "BACKUPCOPY". Escape the
            # underscore to match the literal "_COPY" suffix only.
            sql = r"""
                SELECT OWNER, TABLE_NAME
                FROM CT_MRDS.a_devo_replica_mgmt_rqsd
                WHERE TABLE_NAME NOT LIKE '%\_COPY' ESCAPE '\'
                ORDER BY OWNER, TABLE_NAME
            """
            cursor.execute(sql)
            tables = cursor.fetchall()
        finally:
            # Close the cursor even when execute/fetch raises.
            cursor.close()
        logging.info(f"Found {len(tables)} RQSD tables to replicate (excluding _COPY versions)")
        # Convert to list of owner.table_name format
        table_list = [f"{owner}.{table_name}" for owner, table_name in tables]
        # Push to XCom for next task
        kwargs["ti"].xcom_push(key="rqsd_tables", value=table_list)
        return table_list
    except Exception as e:
        logging.error(f"Error getting RQSD tables: {e}")
        raise
    finally:
        if oracle_conn:
            oracle_conn.close()
def trigger_rqsd_replication(**kwargs):
    """Trigger the devo_replicator_trigger_rqsd DAG once per RQSD table.

    Reads the table list pushed by the "get_rqsd_tables" task. Best-effort:
    a failed trigger is logged and recorded but does NOT fail this task.
    Pushes "triggered_count" and "failed_triggers" to XCom and returns a
    summary dict (or None when there is nothing to replicate).
    """
    import logging
    from datetime import datetime  # NOTE(review): appears unused — confirm before removing
    ti = kwargs["ti"]
    table_list = ti.xcom_pull(task_ids="get_rqsd_tables", key="rqsd_tables")
    if not table_list:
        logging.warning("No RQSD tables found to replicate")
        return
    logging.info(f"Triggering replication for {len(table_list)} tables")
    triggered_count = 0
    failed_triggers = []
    for owner_table in table_list:
        try:
            conf = {
                "owner_table": owner_table
            }
            # execution_date=None lets Airflow assign "now" to the triggered run.
            trigger_dag(
                dag_id='devo_replicator_trigger_rqsd',
                conf=conf,
                execution_date=None,
                replace_microseconds=False
            )
            triggered_count += 1
            logging.info(f"Successfully triggered replication for {owner_table}")
        except Exception as e:
            # Deliberate best-effort: one bad table must not block the rest.
            logging.error(f"Failed to trigger replication for {owner_table}: {e}")
            failed_triggers.append(owner_table)
    logging.info(f"Replication triggered for {triggered_count}/{len(table_list)} tables")
    if failed_triggers:
        logging.warning(f"Failed to trigger replication for: {', '.join(failed_triggers)}")
    # Push results to XCom for observability / downstream checks.
    ti.xcom_push(key="triggered_count", value=triggered_count)
    ti.xcom_push(key="failed_triggers", value=failed_triggers)
    return {
        "triggered_count": triggered_count,
        "total_tables": len(table_list),
        "failed_triggers": failed_triggers
    }
@dag(
    dag_id=DAG_NAME,
    schedule_interval=None,  # triggered manually / by external orchestration only
    start_date=days_ago(2),
    catchup=False,
)
def run_dag():
    """MOPDB RQSD dbt pipeline with post-load replication fan-out.

    Flow: read_vars -> retrieve_run_id -> control_external_run_start
    -> 5 parallel ANNEX dbt groups -> NCB_SUBA output group
    -> control_external_run_end -> get_rqsd_tables
    -> trigger_rqsd_replication -> dag_status.
    """
    def read_vars(**context):
        # Logs bucket configuration for troubleshooting; no functional effect.
        BUCKET = os.getenv("INBOX_BUCKET")
        BUCKET_NAMESPACE = os.getenv("BUCKET_NAMESPACE")
        print("========= DBT ENV =========")
        print(f"BUCKET_NAMESPACE: {BUCKET_NAMESPACE}, BUCKET : {BUCKET}")
        return 1
    read_vars_task = PythonOperator(
        task_id="read_vars",
        python_callable=read_vars,
        provide_context=True,
    )
    retrieve_run_id_task = PythonOperator(
        task_id="retrieve_run_id",
        python_callable=retrieve_run_id,
        provide_context=True,
    )
    # Registers the start of this external run in the dbt control tables.
    control_external_run_start = BashOperator(
        task_id="control_external_run_start",
        params={"db": DATABASE_NAME, "wf": DAG_NAME},
        env=dbt_env,
        bash_command="""
        set -euxo pipefail
        cd /opt/dbt
        dbt --log-format json --log-level debug --debug --log-path /opt/dbt/logs \
        run-operation control_external_run_start \
        --vars '{{ {
        "orchestration_run_id": ti.xcom_pull(task_ids="retrieve_run_id", key="run_id"),
        "input_service_name": params.db,
        "workflow_name": params.wf
        } | tojson }}'
        """,
    )
    # Shared cosmos/dbt configuration reused by every DbtTaskGroup below.
    common_profile = ProfileConfig(
        profiles_yml_filepath=dbt_profiles_dir,
        profile_name="mrds",
        target_name=ENV_NAME,
    )
    common_project = ProjectConfig(dbt_project_path=dbt_root_path)
    common_vars = {
        "orchestration_run_id": "{{ ti.xcom_pull(task_ids='retrieve_run_id', key='run_id') }}",
        "input_service_name": DATABASE_NAME,
        "workflow_name": DAG_NAME,
    }
    common_operator_args = {
        "vars": common_vars,
        "env": dbt_env,
    }
    # Five independent ANNEX loads; each selects dbt models by its own tag.
    m_MOPDB_RQSD_ANNEX_1_1_ALL_ODS_RQSD_OBSERVATIONS = DbtTaskGroup(
        group_id="m_MOPDB_RQSD_ANNEX_1_1_ALL_ODS_RQSD_OBSERVATIONS",
        project_config=common_project,
        profile_config=common_profile,
        render_config=RenderConfig(select=[
            "tag:m_MOPDB_RQSD_ANNEX_1_1_ALL_ODS_RQSD_OBSERVATIONS",
        ]),
        operator_args=common_operator_args,
    )
    m_MOPDB_RQSD_ANNEX_1_2_ALL_ODS_RQSD_OBSERVATIONS = DbtTaskGroup(
        group_id="m_MOPDB_RQSD_ANNEX_1_2_ALL_ODS_RQSD_OBSERVATIONS",
        project_config=common_project,
        profile_config=common_profile,
        render_config=RenderConfig(select=[
            "tag:m_MOPDB_RQSD_ANNEX_1_2_ALL_ODS_RQSD_OBSERVATIONS",
        ]),
        operator_args=common_operator_args,
    )
    m_MOPDB_RQSD_ANNEX_1_1_FIN_ALL_ODS_RQSD_OBSERVATIONS = DbtTaskGroup(
        group_id="m_MOPDB_RQSD_ANNEX_1_1_FIN_ALL_ODS_RQSD_OBSERVATIONS",
        project_config=common_project,
        profile_config=common_profile,
        render_config=RenderConfig(select=[
            "tag:m_MOPDB_RQSD_ANNEX_1_1_FIN_ALL_ODS_RQSD_OBSERVATIONS",
        ]),
        operator_args=common_operator_args,
    )
    m_MOPDB_RQSD_ANNEX_1_2_FIN_ALL_ODS_RQSD_OBSERVATIONS = DbtTaskGroup(
        group_id="m_MOPDB_RQSD_ANNEX_1_2_FIN_ALL_ODS_RQSD_OBSERVATIONS",
        project_config=common_project,
        profile_config=common_profile,
        render_config=RenderConfig(select=[
            "tag:m_MOPDB_RQSD_ANNEX_1_2_FIN_ALL_ODS_RQSD_OBSERVATIONS",
        ]),
        operator_args=common_operator_args,
    )
    m_MOPDB_RQSD_ANNEX_2_ALL_ODS_RQSD_OBSERVATIONS = DbtTaskGroup(
        group_id="m_MOPDB_RQSD_ANNEX_2_ALL_ODS_RQSD_OBSERVATIONS",
        project_config=common_project,
        profile_config=common_profile,
        render_config=RenderConfig(select=[
            "tag:m_MOPDB_RQSD_ANNEX_2_ALL_ODS_RQSD_OBSERVATIONS",
        ]),
        operator_args=common_operator_args,
    )
    # Output stage; runs only after all ANNEX groups above succeed.
    m_MOPDB_RQSD_OUTPUT_CURR_RQSD_NCB_SUBA = DbtTaskGroup(
        group_id="m_MOPDB_RQSD_OUTPUT_CURR_RQSD_NCB_SUBA",
        project_config=common_project,
        profile_config=common_profile,
        render_config=RenderConfig(select=[
            "tag:m_MOPDB_RQSD_OUTPUT_CURR_RQSD_NCB_SUBA",
        ]),
        operator_args=common_operator_args,
    )
    # Closes the run in the control tables; ALL_DONE so it also runs on failure.
    control_external_run_end = BashOperator(
        task_id="control_external_run_end",
        params={"db": DATABASE_NAME, "wf": DAG_NAME},
        env=dbt_env,
        bash_command="""
        set -euxo pipefail
        cd /opt/dbt
        dbt --log-format json --log-level debug --debug --log-path /opt/dbt/logs \
        run-operation control_external_run_end \
        --vars '{{ {
        "orchestration_run_id": ti.xcom_pull(task_ids="retrieve_run_id", key="run_id"),
        "input_service_name": params.db,
        "workflow_name": params.wf
        } | tojson }}'
        """,
        trigger_rule=TriggerRule.ALL_DONE,
    )
    # Get list of RQSD tables to replicate
    get_rqsd_tables = PythonOperator(
        task_id="get_rqsd_tables",
        python_callable=get_rqsd_tables_to_replicate,
        provide_context=True,
    )
    # Trigger replication for all RQSD tables
    trigger_rqsd_replication_task = PythonOperator(
        task_id="trigger_rqsd_replication",
        python_callable=trigger_rqsd_replication,
        provide_context=True,
    )
    # Final status gate: fails the DAG run if any earlier task failed (needed
    # because the ALL_DONE tasks above would otherwise mask upstream failures).
    dag_status = PythonOperator(
        task_id="dag_status",
        provide_context=True,
        python_callable=check_dag_status,
        trigger_rule=TriggerRule.ALL_DONE,
    )
    # dependency chain
    read_vars_task >> retrieve_run_id_task >> control_external_run_start >> [
        m_MOPDB_RQSD_ANNEX_1_1_ALL_ODS_RQSD_OBSERVATIONS,
        m_MOPDB_RQSD_ANNEX_1_2_ALL_ODS_RQSD_OBSERVATIONS,
        m_MOPDB_RQSD_ANNEX_1_1_FIN_ALL_ODS_RQSD_OBSERVATIONS,
        m_MOPDB_RQSD_ANNEX_1_2_FIN_ALL_ODS_RQSD_OBSERVATIONS,
        m_MOPDB_RQSD_ANNEX_2_ALL_ODS_RQSD_OBSERVATIONS,
    ] >> m_MOPDB_RQSD_OUTPUT_CURR_RQSD_NCB_SUBA >> control_external_run_end >> get_rqsd_tables >> trigger_rqsd_replication_task >> dag_status
# Expose the DAG object at module level so Airflow's DagBag discovers it.
globals()[DAG_NAME] = run_dag()

View File

@@ -0,0 +1,216 @@
"""
NEW CHANGES FOR MANUAL:
Changed DAG_NAME from dynamic file-based to hardcoded "w_MOPDB_RQSD_PROCESS_MANUAL"
Changed the last task group from m_MOPDB_RQSD_OUTPUT_CURR_RQSD_NCB_SUBA to m_MOPDB_RQSD_OUTPUT_CURR_RQSD_NCB_SUBA_MANUAL
Updated the tag in the RenderConfig to "tag:m_MOPDB_RQSD_OUTPUT_CURR_RQSD_NCB_SUBA_MANUAL"
Added tags=["MOPDB", "RQSD", "DBT", "MANUAL"] to the DAG decorator
KEEP IDENTICAL :
All other task groups remain the same as the original MOPDB DAG
Flow structure remains identical:
read_vars → retrieve_run_id → control_external_run_start
→ 5 parallel ANNEX tasks → MANUAL output task → control_external_run_end → dag_status
"""
import os
from airflow.decorators import dag
from airflow.operators.bash import BashOperator
from airflow.operators.python import PythonOperator
from airflow.utils.dates import days_ago
from airflow.utils.trigger_rule import TriggerRule
from cosmos import DbtTaskGroup, ProfileConfig, ProjectConfig, RenderConfig
from mrds.utils.security_utils import get_verified_run_id, verify_run_id
DAG_NAME = "w_MOPDB_RQSD_PROCESS_MANUAL"
ENV_NAME = os.getenv("MRDS_ENV", "").lower()
DATABASE_NAME_MAP = {
"dev": "MOPDB",
"test": "MOPDB_TEST",
}
DATABASE_NAME = DATABASE_NAME_MAP.get(ENV_NAME, "MOPDB")
dbt_root_path = "/opt/dbt"
dbt_profiles_dir = "/opt/dbt/profiles.yml"
dbt_profiles_dir_parent = "/opt/dbt"
dbt_env = {
"DBT_PROFILES_DIR": dbt_profiles_dir_parent,
"DBT_TARGET": ENV_NAME,
"MRDS_LOADER_DB_USER": os.getenv("MRDS_LOADER_DB_USER"),
"MRDS_LOADER_DB_PASS": os.getenv("MRDS_LOADER_DB_PASS"),
"MRDS_LOADER_DB_TNS": os.getenv("MRDS_LOADER_DB_TNS", "XE"),
"MRDS_SCHEMA": os.getenv("MRDS_SCHEMA", "CT_MRDS"),
"MRDS_PROTOCOL": os.getenv("MRDS_PROTOCOL", "tcps"),
"MRDS_THREADS": os.getenv("MRDS_THREADS", "4"),
"DBT_LOG_PATH": "/opt/dbt/logs",
"DBT_TARGET_PATH": "/opt/dbt/target",
"PYTHONUNBUFFERED": "1",
}
def retrieve_run_id(**kwargs):
    """Obtain the security-verified run id, publish it via XCom, return it."""
    verified = get_verified_run_id(kwargs)
    task_instance = kwargs["ti"]
    task_instance.xcom_push(key="run_id", value=verified)
    return verified
def check_dag_status(**kwargs):
    """Propagate upstream failures: raise if any other task instance failed."""
    own_id = kwargs["task_instance"].task_id
    failed_ti = next(
        (t for t in kwargs["dag_run"].get_task_instances()
         if t.state == "failed" and t.task_id != own_id),
        None,
    )
    if failed_ti is not None:
        raise Exception(f"Task {failed_ti.task_id} failed. Failing this DAG run")
@dag(
    dag_id=DAG_NAME,
    schedule_interval=None,  # manual-trigger variant; never scheduled
    start_date=days_ago(2),
    catchup=False,
    tags=["MOPDB", "RQSD", "DBT", "MANUAL"]
)
def run_dag():
    """MANUAL variant of the MOPDB RQSD dbt pipeline.

    Same flow as the scheduled DAG, but the output stage uses the
    *_MANUAL-tagged dbt models and there is no replication fan-out:
    read_vars -> retrieve_run_id -> control_external_run_start
    -> 5 parallel ANNEX groups -> MANUAL output group
    -> control_external_run_end -> dag_status.
    """
    def read_vars(**context):
        # Logs bucket configuration for troubleshooting; no functional effect.
        BUCKET = os.getenv("INBOX_BUCKET")
        BUCKET_NAMESPACE = os.getenv("BUCKET_NAMESPACE")
        print("========= DBT ENV =========")
        print(f"BUCKET_NAMESPACE: {BUCKET_NAMESPACE}, BUCKET : {BUCKET}")
        return 1
    read_vars_task = PythonOperator(
        task_id="read_vars",
        python_callable=read_vars,
        provide_context=True,
    )
    retrieve_run_id_task = PythonOperator(
        task_id="retrieve_run_id",
        python_callable=retrieve_run_id,
        provide_context=True,
    )
    # Registers the start of this external run in the dbt control tables.
    control_external_run_start = BashOperator(
        task_id="control_external_run_start",
        params={"db": DATABASE_NAME, "wf": DAG_NAME},
        env=dbt_env,
        bash_command="""
        set -euxo pipefail
        cd /opt/dbt
        dbt --log-format json --log-level debug --debug --log-path /opt/dbt/logs \
        run-operation control_external_run_start \
        --vars '{{ {
        "orchestration_run_id": ti.xcom_pull(task_ids="retrieve_run_id", key="run_id"),
        "input_service_name": params.db,
        "workflow_name": params.wf
        } | tojson }}'
        """,
    )
    # Shared cosmos/dbt configuration reused by every DbtTaskGroup below.
    common_profile = ProfileConfig(
        profiles_yml_filepath=dbt_profiles_dir,
        profile_name="mrds",
        target_name=ENV_NAME,
    )
    common_project = ProjectConfig(dbt_project_path=dbt_root_path)
    common_vars = {
        "orchestration_run_id": "{{ ti.xcom_pull(task_ids='retrieve_run_id', key='run_id') }}",
        "input_service_name": DATABASE_NAME,
        "workflow_name": DAG_NAME,
    }
    common_operator_args = {
        "vars": common_vars,
        "env": dbt_env,
    }
    # Five independent ANNEX loads; identical to the scheduled DAG.
    m_MOPDB_RQSD_ANNEX_1_1_ALL_ODS_RQSD_OBSERVATIONS = DbtTaskGroup(
        group_id="m_MOPDB_RQSD_ANNEX_1_1_ALL_ODS_RQSD_OBSERVATIONS",
        project_config=common_project,
        profile_config=common_profile,
        render_config=RenderConfig(select=[
            "tag:m_MOPDB_RQSD_ANNEX_1_1_ALL_ODS_RQSD_OBSERVATIONS",
        ]),
        operator_args=common_operator_args,
    )
    m_MOPDB_RQSD_ANNEX_1_2_ALL_ODS_RQSD_OBSERVATIONS = DbtTaskGroup(
        group_id="m_MOPDB_RQSD_ANNEX_1_2_ALL_ODS_RQSD_OBSERVATIONS",
        project_config=common_project,
        profile_config=common_profile,
        render_config=RenderConfig(select=[
            "tag:m_MOPDB_RQSD_ANNEX_1_2_ALL_ODS_RQSD_OBSERVATIONS",
        ]),
        operator_args=common_operator_args,
    )
    m_MOPDB_RQSD_ANNEX_1_1_FIN_ALL_ODS_RQSD_OBSERVATIONS = DbtTaskGroup(
        group_id="m_MOPDB_RQSD_ANNEX_1_1_FIN_ALL_ODS_RQSD_OBSERVATIONS",
        project_config=common_project,
        profile_config=common_profile,
        render_config=RenderConfig(select=[
            "tag:m_MOPDB_RQSD_ANNEX_1_1_FIN_ALL_ODS_RQSD_OBSERVATIONS",
        ]),
        operator_args=common_operator_args,
    )
    m_MOPDB_RQSD_ANNEX_1_2_FIN_ALL_ODS_RQSD_OBSERVATIONS = DbtTaskGroup(
        group_id="m_MOPDB_RQSD_ANNEX_1_2_FIN_ALL_ODS_RQSD_OBSERVATIONS",
        project_config=common_project,
        profile_config=common_profile,
        render_config=RenderConfig(select=[
            "tag:m_MOPDB_RQSD_ANNEX_1_2_FIN_ALL_ODS_RQSD_OBSERVATIONS",
        ]),
        operator_args=common_operator_args,
    )
    m_MOPDB_RQSD_ANNEX_2_ALL_ODS_RQSD_OBSERVATIONS = DbtTaskGroup(
        group_id="m_MOPDB_RQSD_ANNEX_2_ALL_ODS_RQSD_OBSERVATIONS",
        project_config=common_project,
        profile_config=common_profile,
        render_config=RenderConfig(select=[
            "tag:m_MOPDB_RQSD_ANNEX_2_ALL_ODS_RQSD_OBSERVATIONS",
        ]),
        operator_args=common_operator_args,
    )
    # MANUAL output stage — the only task group differing from the scheduled DAG.
    m_MOPDB_RQSD_OUTPUT_CURR_RQSD_NCB_SUBA_MANUAL = DbtTaskGroup(
        group_id="m_MOPDB_RQSD_OUTPUT_CURR_RQSD_NCB_SUBA_MANUAL",
        project_config=common_project,
        profile_config=common_profile,
        render_config=RenderConfig(select=[
            "tag:m_MOPDB_RQSD_OUTPUT_CURR_RQSD_NCB_SUBA_MANUAL",
        ]),
        operator_args=common_operator_args,
    )
    # Closes the run in the control tables; ALL_DONE so it also runs on failure.
    control_external_run_end = BashOperator(
        task_id="control_external_run_end",
        params={"db": DATABASE_NAME, "wf": DAG_NAME},
        env=dbt_env,
        bash_command="""
        set -euxo pipefail
        cd /opt/dbt
        dbt --log-format json --log-level debug --debug --log-path /opt/dbt/logs \
        run-operation control_external_run_end \
        --vars '{{ {
        "orchestration_run_id": ti.xcom_pull(task_ids="retrieve_run_id", key="run_id"),
        "input_service_name": params.db,
        "workflow_name": params.wf
        } | tojson }}'
        """,
        trigger_rule=TriggerRule.ALL_DONE,
    )
    # Final status gate: fails the DAG run if any earlier task failed.
    dag_status = PythonOperator(
        task_id="dag_status",
        provide_context=True,
        python_callable=check_dag_status,
        trigger_rule=TriggerRule.ALL_DONE,
    )
    read_vars_task >> retrieve_run_id_task >> control_external_run_start >> [
        m_MOPDB_RQSD_ANNEX_1_1_ALL_ODS_RQSD_OBSERVATIONS,
        m_MOPDB_RQSD_ANNEX_1_2_ALL_ODS_RQSD_OBSERVATIONS,
        m_MOPDB_RQSD_ANNEX_1_1_FIN_ALL_ODS_RQSD_OBSERVATIONS,
        m_MOPDB_RQSD_ANNEX_1_2_FIN_ALL_ODS_RQSD_OBSERVATIONS,
        m_MOPDB_RQSD_ANNEX_2_ALL_ODS_RQSD_OBSERVATIONS,
    ] >> m_MOPDB_RQSD_OUTPUT_CURR_RQSD_NCB_SUBA_MANUAL >> control_external_run_end >> dag_status
# Expose the DAG object at module level so Airflow's DagBag discovers it.
globals()[DAG_NAME] = run_dag()

View File

View File

@@ -0,0 +1,156 @@
import os
from airflow.decorators import dag
from airflow.operators.bash import BashOperator
from airflow.operators.python import PythonOperator
from airflow.utils.dates import days_ago
from airflow.utils.trigger_rule import TriggerRule
from cosmos import DbtTaskGroup, ProfileConfig, ProjectConfig, RenderConfig
# DAG id is derived from this file's name, so the filename is the single
# source of truth for the DAG id.
DAG_NAME = os.path.splitext(os.path.basename(__file__))[0]
# Deployment environment ("dev", "test", ...) selected via MRDS_ENV.
ENV_NAME = os.getenv("MRDS_ENV", "").lower()
# Maps environment -> logical input database name passed to the dbt control macros.
DATABASE_NAME_MAP = {
    "dev": "MOPDB",
    "test": "MOPDB_TEST",
}
# Unknown/unset environments fall back to the dev database name.
DATABASE_NAME = DATABASE_NAME_MAP.get(ENV_NAME, "MOPDB")
# check cron 2
# Locations of the dbt project and profiles inside the worker image.
dbt_root_path = "/opt/dbt"
dbt_profiles_dir = "/opt/dbt/profiles.yml"
dbt_profiles_dir_parent = "/opt/dbt"
# Environment passed to every dbt invocation (BashOperator and cosmos groups).
dbt_env = {
    "DBT_PROFILES_DIR": dbt_profiles_dir_parent,
    "DBT_TARGET": ENV_NAME,
    "MRDS_LOADER_DB_USER": os.getenv("MRDS_LOADER_DB_USER"),
    "MRDS_LOADER_DB_PASS": os.getenv("MRDS_LOADER_DB_PASS"),
    "MRDS_LOADER_DB_TNS": os.getenv("MRDS_LOADER_DB_TNS", "XE"),
    "MRDS_SCHEMA": os.getenv("MRDS_SCHEMA", "CT_MRDS"),
    "MRDS_PROTOCOL": os.getenv("MRDS_PROTOCOL", "tcps"),
    "MRDS_THREADS": os.getenv("MRDS_THREADS", "4"),
    "DBT_LOG_PATH": "/opt/dbt/logs",
    "DBT_TARGET_PATH": "/opt/dbt/target",
    "PYTHONUNBUFFERED": "1",
}
def retrieve_run_id(**kwargs):
    """Push the current Airflow run_id to XCom (key "run_id") and return it."""
    current_run_id = kwargs["run_id"]
    kwargs["ti"].xcom_push(key="run_id", value=current_run_id)
    return current_run_id
def check_dag_status(**kwargs):
    """Raise if any sibling task instance in this DAG run has failed."""
    me = kwargs["task_instance"].task_id
    for instance in kwargs["dag_run"].get_task_instances():
        if instance.task_id == me:
            continue
        if instance.state == "failed":
            raise Exception(f"Task {instance.task_id} failed. Failing this DAG run")
@dag(
    dag_id=DAG_NAME,
    schedule_interval=None,  # triggered manually / by external orchestration only
    start_date=days_ago(2),
    catchup=False,
)
def run_dag():
    """Single-mapping dbt pipeline for TOP.T_AGGREGATED_ALLOTMENT.

    Flow: read_vars -> retrieve_run_id -> control_external_run_start
    -> aggregated-allotment dbt group -> control_external_run_end -> dag_status.
    """
    def read_vars(**context):
        # Logs bucket configuration for troubleshooting; no functional effect.
        BUCKET = os.getenv("INBOX_BUCKET")
        BUCKET_NAMESPACE = os.getenv("BUCKET_NAMESPACE")
        print("========= DBT ENV =========")
        print(f"BUCKET_NAMESPACE: {BUCKET_NAMESPACE}, BUCKET : {BUCKET}")
        return 1
    read_vars_task = PythonOperator(
        task_id="read_vars",
        python_callable=read_vars,
        provide_context=True,
    )
    retrieve_run_id_task = PythonOperator(
        task_id="retrieve_run_id",
        python_callable=retrieve_run_id,
        provide_context=True,
    )
    # Registers the start of this external run in the dbt control tables.
    control_external_run_start = BashOperator(
        task_id="control_external_run_start",
        params={"db": DATABASE_NAME, "wf": DAG_NAME},
        env=dbt_env,
        bash_command="""
        set -euxo pipefail
        cd /opt/dbt
        dbt --log-format json --log-level debug --debug --log-path /opt/dbt/logs \
        run-operation control_external_run_start \
        --vars '{{ {
        "orchestration_run_id": ti.xcom_pull(task_ids="retrieve_run_id", key="run_id"),
        "input_service_name": params.db,
        "workflow_name": params.wf
        } | tojson }}'
        """,
    )
    # Shared cosmos/dbt configuration for the task group below.
    common_profile = ProfileConfig(
        profiles_yml_filepath=dbt_profiles_dir,
        profile_name="mrds",
        target_name=ENV_NAME,
    )
    common_project = ProjectConfig(dbt_project_path=dbt_root_path)
    common_vars = {
        "orchestration_run_id": "{{ ti.xcom_pull(task_ids='retrieve_run_id', key='run_id') }}",
        "input_service_name": DATABASE_NAME,
        "workflow_name": DAG_NAME,
    }
    common_operator_args = {
        "vars": common_vars,
        "env": dbt_env,
    }
    # dbt models tagged for the TOP.T_AGGREGATED_ALLOTMENT mapping.
    m_MOPDB_TOP_T_AGGREGATED_ALLOTMENT_OU_TOP_AGGREGATED_ALLOTMENT = DbtTaskGroup(
        group_id="m_MOPDB_TOP_T_AGGREGATED_ALLOTMENT_OU_TOP_AGGREGATED_ALLOTMENT",
        project_config=common_project,
        profile_config=common_profile,
        render_config=RenderConfig(select=[
            "tag:m_MOPDB_TOP_T_AGGREGATED_ALLOTMENT_OU_TOP_AGGREGATED_ALLOTMENT",
        ]),
        operator_args=common_operator_args,
    )
    # Closes the run in the control tables; ALL_DONE so it also runs on failure.
    control_external_run_end = BashOperator(
        task_id="control_external_run_end",
        params={"db": DATABASE_NAME, "wf": DAG_NAME},
        env=dbt_env,
        bash_command="""
        set -euxo pipefail
        cd /opt/dbt
        dbt --log-format json --log-level debug --debug --log-path /opt/dbt/logs \
        run-operation control_external_run_end \
        --vars '{{ {
        "orchestration_run_id": ti.xcom_pull(task_ids="retrieve_run_id", key="run_id"),
        "input_service_name": params.db,
        "workflow_name": params.wf
        } | tojson }}'
        """,
        trigger_rule=TriggerRule.ALL_DONE,
    )
    # Final status gate: fails the DAG run if any earlier task failed.
    dag_status = PythonOperator(
        task_id="dag_status",
        provide_context=True,
        python_callable=check_dag_status,
        trigger_rule=TriggerRule.ALL_DONE,
    )
    read_vars_task >> retrieve_run_id_task >> control_external_run_start >> m_MOPDB_TOP_T_AGGREGATED_ALLOTMENT_OU_TOP_AGGREGATED_ALLOTMENT >> control_external_run_end >> dag_status
# Expose the DAG object at module level so Airflow's DagBag discovers it.
globals()[DAG_NAME] = run_dag()

View File

@@ -0,0 +1,156 @@
import os
from airflow.decorators import dag
from airflow.operators.bash import BashOperator
from airflow.operators.python import PythonOperator
from airflow.utils.dates import days_ago
from airflow.utils.trigger_rule import TriggerRule
from cosmos import DbtTaskGroup, ProfileConfig, ProjectConfig, RenderConfig
# DAG id is derived from this file's name, so the filename is the single
# source of truth for the DAG id.
DAG_NAME = os.path.splitext(os.path.basename(__file__))[0]
# Deployment environment ("dev", "test", ...) selected via MRDS_ENV.
ENV_NAME = os.getenv("MRDS_ENV", "").lower()
# Maps environment -> logical input database name passed to the dbt control macros.
DATABASE_NAME_MAP = {
    "dev": "MOPDB",
    "test": "MOPDB_TEST",
}
# Unknown/unset environments fall back to the dev database name.
DATABASE_NAME = DATABASE_NAME_MAP.get(ENV_NAME, "MOPDB")
# check cron 2
# Locations of the dbt project and profiles inside the worker image.
dbt_root_path = "/opt/dbt"
dbt_profiles_dir = "/opt/dbt/profiles.yml"
dbt_profiles_dir_parent = "/opt/dbt"
# Environment passed to every dbt invocation (BashOperator and cosmos groups).
dbt_env = {
    "DBT_PROFILES_DIR": dbt_profiles_dir_parent,
    "DBT_TARGET": ENV_NAME,
    "MRDS_LOADER_DB_USER": os.getenv("MRDS_LOADER_DB_USER"),
    "MRDS_LOADER_DB_PASS": os.getenv("MRDS_LOADER_DB_PASS"),
    "MRDS_LOADER_DB_TNS": os.getenv("MRDS_LOADER_DB_TNS", "XE"),
    "MRDS_SCHEMA": os.getenv("MRDS_SCHEMA", "CT_MRDS"),
    "MRDS_PROTOCOL": os.getenv("MRDS_PROTOCOL", "tcps"),
    "MRDS_THREADS": os.getenv("MRDS_THREADS", "4"),
    "DBT_LOG_PATH": "/opt/dbt/logs",
    "DBT_TARGET_PATH": "/opt/dbt/target",
    "PYTHONUNBUFFERED": "1",
}
def retrieve_run_id(**kwargs):
    """Expose this DAG run's run_id to downstream tasks via XCom."""
    task_instance = kwargs["ti"]
    task_instance.xcom_push(key="run_id", value=kwargs["run_id"])
    return kwargs["run_id"]
def check_dag_status(**kwargs):
    """Fail the whole DAG run when any other task instance has failed."""
    current_task = kwargs["task_instance"].task_id
    instances = kwargs["dag_run"].get_task_instances()
    for candidate in instances:
        is_other = candidate.task_id != current_task
        if is_other and candidate.state == "failed":
            raise Exception(f"Task {candidate.task_id} failed. Failing this DAG run")
@dag(
    dag_id=DAG_NAME,
    schedule_interval=None,  # triggered manually / by external orchestration only
    start_date=days_ago(2),
    catchup=False,
)
def run_dag():
    """Single-mapping dbt pipeline for TOP.T_ALLOTMENT.

    Flow: read_vars -> retrieve_run_id -> control_external_run_start
    -> allotment dbt group -> control_external_run_end -> dag_status.
    """
    def read_vars(**context):
        # Logs bucket configuration for troubleshooting; no functional effect.
        BUCKET = os.getenv("INBOX_BUCKET")
        BUCKET_NAMESPACE = os.getenv("BUCKET_NAMESPACE")
        print("========= DBT ENV =========")
        print(f"BUCKET_NAMESPACE: {BUCKET_NAMESPACE}, BUCKET : {BUCKET}")
        return 1
    read_vars_task = PythonOperator(
        task_id="read_vars",
        python_callable=read_vars,
        provide_context=True,
    )
    retrieve_run_id_task = PythonOperator(
        task_id="retrieve_run_id",
        python_callable=retrieve_run_id,
        provide_context=True,
    )
    # Registers the start of this external run in the dbt control tables.
    control_external_run_start = BashOperator(
        task_id="control_external_run_start",
        params={"db": DATABASE_NAME, "wf": DAG_NAME},
        env=dbt_env,
        bash_command="""
        set -euxo pipefail
        cd /opt/dbt
        dbt --log-format json --log-level debug --debug --log-path /opt/dbt/logs \
        run-operation control_external_run_start \
        --vars '{{ {
        "orchestration_run_id": ti.xcom_pull(task_ids="retrieve_run_id", key="run_id"),
        "input_service_name": params.db,
        "workflow_name": params.wf
        } | tojson }}'
        """,
    )
    # Shared cosmos/dbt configuration for the task group below.
    common_profile = ProfileConfig(
        profiles_yml_filepath=dbt_profiles_dir,
        profile_name="mrds",
        target_name=ENV_NAME,
    )
    common_project = ProjectConfig(dbt_project_path=dbt_root_path)
    common_vars = {
        "orchestration_run_id": "{{ ti.xcom_pull(task_ids='retrieve_run_id', key='run_id') }}",
        "input_service_name": DATABASE_NAME,
        "workflow_name": DAG_NAME,
    }
    common_operator_args = {
        "vars": common_vars,
        "env": dbt_env,
    }
    # dbt models tagged for the TOP.T_ALLOTMENT mapping.
    m_MOPDB_TOP_T_ALLOTMENT_OU_TOP_ALLOTMENT = DbtTaskGroup(
        group_id="m_MOPDB_TOP_T_ALLOTMENT_OU_TOP_ALLOTMENT",
        project_config=common_project,
        profile_config=common_profile,
        render_config=RenderConfig(select=[
            "tag:m_MOPDB_TOP_T_ALLOTMENT_OU_TOP_ALLOTMENT",
        ]),
        operator_args=common_operator_args,
    )
    # Closes the run in the control tables; ALL_DONE so it also runs on failure.
    control_external_run_end = BashOperator(
        task_id="control_external_run_end",
        params={"db": DATABASE_NAME, "wf": DAG_NAME},
        env=dbt_env,
        bash_command="""
        set -euxo pipefail
        cd /opt/dbt
        dbt --log-format json --log-level debug --debug --log-path /opt/dbt/logs \
        run-operation control_external_run_end \
        --vars '{{ {
        "orchestration_run_id": ti.xcom_pull(task_ids="retrieve_run_id", key="run_id"),
        "input_service_name": params.db,
        "workflow_name": params.wf
        } | tojson }}'
        """,
        trigger_rule=TriggerRule.ALL_DONE,
    )
    # Final status gate: fails the DAG run if any earlier task failed.
    dag_status = PythonOperator(
        task_id="dag_status",
        provide_context=True,
        python_callable=check_dag_status,
        trigger_rule=TriggerRule.ALL_DONE,
    )
    read_vars_task >> retrieve_run_id_task >> control_external_run_start >> m_MOPDB_TOP_T_ALLOTMENT_OU_TOP_ALLOTMENT >> control_external_run_end >> dag_status
# Expose the DAG object at module level so Airflow's DagBag discovers it.
globals()[DAG_NAME] = run_dag()