Compare commits
75 Commits
26aba08759
...
develop
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
e9d4056451 | ||
|
|
60b218d211 | ||
|
|
819b6f7880 | ||
|
|
c68d5bfe2c | ||
|
|
c607bbe26e | ||
|
|
1569237306 | ||
|
|
472a724fe0 | ||
|
|
04d4f6ac02 | ||
|
|
ca5d8b320c | ||
|
|
2605896469 | ||
|
|
b588b0bb72 | ||
|
|
6060f93fde | ||
|
|
99aca3af40 | ||
|
|
1089184367 | ||
|
|
e538706896 | ||
|
|
ff034fcd68 | ||
|
|
b85172ae84 | ||
|
|
577c94f363 | ||
|
|
11723f6c88 | ||
|
|
b63be15f5d | ||
|
|
28972e7428 | ||
|
|
866b886c70 | ||
|
|
3848a9471b | ||
|
|
cafa30f8f3 | ||
|
|
a0f4146a24 | ||
|
|
2b116c0256 | ||
|
|
204616252a | ||
|
|
93d2e537a9 | ||
|
|
2143a81aa0 | ||
|
|
b6cf54f103 | ||
|
|
6ade77c3c0 | ||
|
|
8ce07c3360 | ||
|
|
64dc830a2b | ||
|
|
020dacb571 | ||
|
|
057a4e7ce3 | ||
|
|
a9a51f19be | ||
|
|
fc6304c60b | ||
|
|
912b7a6466 | ||
|
|
b6f6446232 | ||
|
|
aa2f2f13f9 | ||
|
|
5c77d42d9a | ||
|
|
d370b9c9ef | ||
|
|
c5d39fc01d | ||
|
|
80916ea302 | ||
|
|
a4ab56e96d | ||
|
|
92c5261215 | ||
|
|
b34c942a8f | ||
|
|
d175179ddc | ||
|
|
d8a5edc3b1 | ||
|
|
d6c34085f7 | ||
|
|
2c40f091e0 | ||
|
|
255044f23b | ||
|
|
0053ecc556 | ||
|
|
3b7ce7de97 | ||
|
|
a3ec31341b | ||
|
|
6039d6bce4 | ||
|
|
a7d286b1e6 | ||
|
|
f2bcdb3a76 | ||
|
|
15dafbff2a | ||
|
|
3261aa1a6d | ||
|
|
1327410880 | ||
|
|
f4e9fb9cb2 | ||
|
|
f8213b76ab | ||
|
|
641af7415f | ||
|
|
c1c3890a1a | ||
|
|
63ed05930e | ||
|
|
5320db627b | ||
|
|
3171ff2ddf | ||
|
|
d237c2d7aa | ||
|
|
96e3e2f845 | ||
|
|
3eb39091de | ||
|
|
113b9f55e3 | ||
|
|
6c8b22eac9 | ||
|
|
293f2873b7 | ||
|
|
7864c568d7 |
7
.vscode/settings.json
vendored
7
.vscode/settings.json
vendored
@@ -121,6 +121,13 @@
|
|||||||
"password": "Cloudpass#34",
|
"password": "Cloudpass#34",
|
||||||
"connectionString": "ggmichalski_high",
|
"connectionString": "ggmichalski_high",
|
||||||
"walletLocation": "c:\\_git\\OracleAI\\oracledb1\\Wallet_ggmichalski"
|
"walletLocation": "c:\\_git\\OracleAI\\oracledb1\\Wallet_ggmichalski"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "OU_C2D@ggmichalski_high",
|
||||||
|
"username": "OU_C2D",
|
||||||
|
"password": "Cloudpass#34",
|
||||||
|
"connectionString": "ggmichalski_high",
|
||||||
|
"walletLocation": "c:\\_git\\OracleAI\\oracledb1\\Wallet_ggmichalski"
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -24,7 +24,9 @@ BEGIN
|
|||||||
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
||||||
pBucketArea => 'ARCHIVE',
|
pBucketArea => 'ARCHIVE',
|
||||||
pFolderName => 'ARCHIVE/LM/LM_ADHOC_ADJUSTMENTS_HEADER',
|
pFolderName => 'ARCHIVE/LM/LM_ADHOC_ADJUSTMENTS_HEADER',
|
||||||
pParallelDegree => 1
|
pParallelDegree => 1,
|
||||||
|
pTemplateTableName => 'CT_ET_TEMPLATES.LM_ADHOC_ADJUSTMENTS_HEADER',
|
||||||
|
pJobClass => 'high'
|
||||||
);
|
);
|
||||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_ADHOC_ADJ_HEADER exported');
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_ADHOC_ADJ_HEADER exported');
|
||||||
EXCEPTION
|
EXCEPTION
|
||||||
@@ -44,7 +46,9 @@ BEGIN
|
|||||||
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
||||||
pBucketArea => 'ARCHIVE',
|
pBucketArea => 'ARCHIVE',
|
||||||
pFolderName => 'ARCHIVE/LM/LM_ADHOC_ADJUSTMENTS_ITEM',
|
pFolderName => 'ARCHIVE/LM/LM_ADHOC_ADJUSTMENTS_ITEM',
|
||||||
pParallelDegree => 1
|
pParallelDegree => 1,
|
||||||
|
pTemplateTableName => 'CT_ET_TEMPLATES.LM_ADHOC_ADJUSTMENTS_ITEM',
|
||||||
|
pJobClass => 'high'
|
||||||
);
|
);
|
||||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_ADHOC_ADJ_ITEM exported');
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_ADHOC_ADJ_ITEM exported');
|
||||||
EXCEPTION
|
EXCEPTION
|
||||||
@@ -64,7 +68,9 @@ BEGIN
|
|||||||
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
||||||
pBucketArea => 'ARCHIVE',
|
pBucketArea => 'ARCHIVE',
|
||||||
pFolderName => 'ARCHIVE/LM/LM_ADHOC_ADJUSTMENTS_ITEM_HEADER',
|
pFolderName => 'ARCHIVE/LM/LM_ADHOC_ADJUSTMENTS_ITEM_HEADER',
|
||||||
pParallelDegree => 1
|
pParallelDegree => 1,
|
||||||
|
pTemplateTableName => 'CT_ET_TEMPLATES.LM_ADHOC_ADJUSTMENTS_ITEM_HEADER',
|
||||||
|
pJobClass => 'high'
|
||||||
);
|
);
|
||||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_ADHOC_ADJ_ITEM_HEADER exported');
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_ADHOC_ADJ_ITEM_HEADER exported');
|
||||||
EXCEPTION
|
EXCEPTION
|
||||||
|
|||||||
@@ -29,7 +29,9 @@ BEGIN
|
|||||||
pKeyColumnName => 'A_ETL_LOAD_SET_KEY',
|
pKeyColumnName => 'A_ETL_LOAD_SET_KEY',
|
||||||
pBucketArea => 'ARCHIVE',
|
pBucketArea => 'ARCHIVE',
|
||||||
pFolderName => 'ARCHIVE/LM/LM_BALANCESHEET_HEADER',
|
pFolderName => 'ARCHIVE/LM/LM_BALANCESHEET_HEADER',
|
||||||
pParallelDegree => 4
|
pParallelDegree => 4,
|
||||||
|
pTemplateTableName => 'CT_ET_TEMPLATES.LM_BALANCESHEET_HEADER',
|
||||||
|
pJobClass => 'high'
|
||||||
);
|
);
|
||||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_BALANCESHEET_HEADER exported');
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_BALANCESHEET_HEADER exported');
|
||||||
EXCEPTION
|
EXCEPTION
|
||||||
@@ -49,7 +51,9 @@ BEGIN
|
|||||||
pKeyColumnName => 'A_ETL_LOAD_SET_KEY',
|
pKeyColumnName => 'A_ETL_LOAD_SET_KEY',
|
||||||
pBucketArea => 'ARCHIVE',
|
pBucketArea => 'ARCHIVE',
|
||||||
pFolderName => 'ARCHIVE/LM/LM_BALANCESHEET_ITEM',
|
pFolderName => 'ARCHIVE/LM/LM_BALANCESHEET_ITEM',
|
||||||
pParallelDegree => 16
|
pParallelDegree => 16,
|
||||||
|
pTemplateTableName => 'CT_ET_TEMPLATES.LM_BALANCESHEET_ITEM',
|
||||||
|
pJobClass => 'high'
|
||||||
);
|
);
|
||||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_BALANCESHEET_ITEM exported');
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_BALANCESHEET_ITEM exported');
|
||||||
EXCEPTION
|
EXCEPTION
|
||||||
|
|||||||
@@ -24,7 +24,9 @@ BEGIN
|
|||||||
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
||||||
pBucketArea => 'ARCHIVE',
|
pBucketArea => 'ARCHIVE',
|
||||||
pFolderName => 'ARCHIVE/LM/LM_CSM_ADJUSTMENTS_HEADER',
|
pFolderName => 'ARCHIVE/LM/LM_CSM_ADJUSTMENTS_HEADER',
|
||||||
pParallelDegree => 1
|
pParallelDegree => 1,
|
||||||
|
pTemplateTableName => 'CT_ET_TEMPLATES.LM_CSM_ADJUSTMENTS_HEADER',
|
||||||
|
pJobClass => 'high'
|
||||||
);
|
);
|
||||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_CSM_ADJ_HEADER exported');
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_CSM_ADJ_HEADER exported');
|
||||||
EXCEPTION
|
EXCEPTION
|
||||||
@@ -44,7 +46,9 @@ BEGIN
|
|||||||
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
||||||
pBucketArea => 'ARCHIVE',
|
pBucketArea => 'ARCHIVE',
|
||||||
pFolderName => 'ARCHIVE/LM/LM_CSM_ADJUSTMENTS_ITEM',
|
pFolderName => 'ARCHIVE/LM/LM_CSM_ADJUSTMENTS_ITEM',
|
||||||
pParallelDegree => 2
|
pParallelDegree => 2,
|
||||||
|
pTemplateTableName => 'CT_ET_TEMPLATES.LM_CSM_ADJUSTMENTS_ITEM',
|
||||||
|
pJobClass => 'high'
|
||||||
);
|
);
|
||||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_CSM_ADJ_ITEM exported');
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_CSM_ADJ_ITEM exported');
|
||||||
EXCEPTION
|
EXCEPTION
|
||||||
@@ -64,7 +68,9 @@ BEGIN
|
|||||||
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
||||||
pBucketArea => 'ARCHIVE',
|
pBucketArea => 'ARCHIVE',
|
||||||
pFolderName => 'ARCHIVE/LM/LM_CSM_ADJUSTMENTS_ITEM_HEADER',
|
pFolderName => 'ARCHIVE/LM/LM_CSM_ADJUSTMENTS_ITEM_HEADER',
|
||||||
pParallelDegree => 2
|
pParallelDegree => 2,
|
||||||
|
pTemplateTableName => 'CT_ET_TEMPLATES.LM_CSM_ADJUSTMENTS_ITEM_HEADER',
|
||||||
|
pJobClass => 'high'
|
||||||
);
|
);
|
||||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_CSM_ADJ_ITEM_HEADER exported');
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_CSM_ADJ_ITEM_HEADER exported');
|
||||||
EXCEPTION
|
EXCEPTION
|
||||||
|
|||||||
@@ -29,7 +29,9 @@ BEGIN
|
|||||||
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
|
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
|
||||||
pBucketArea => 'ARCHIVE',
|
pBucketArea => 'ARCHIVE',
|
||||||
pFolderName => 'ARCHIVE/LM/LM_STANDING_FACILITIES',
|
pFolderName => 'ARCHIVE/LM/LM_STANDING_FACILITIES',
|
||||||
pParallelDegree => 8
|
pParallelDegree => 8,
|
||||||
|
pTemplateTableName => 'CT_ET_TEMPLATES.LM_STANDING_FACILITIES',
|
||||||
|
pJobClass => 'high'
|
||||||
);
|
);
|
||||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_STANDING_FACILITY exported');
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_STANDING_FACILITY exported');
|
||||||
EXCEPTION
|
EXCEPTION
|
||||||
@@ -49,7 +51,9 @@ BEGIN
|
|||||||
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
|
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
|
||||||
pBucketArea => 'ARCHIVE',
|
pBucketArea => 'ARCHIVE',
|
||||||
pFolderName => 'ARCHIVE/LM/LM_STANDING_FACILITIES_HEADER',
|
pFolderName => 'ARCHIVE/LM/LM_STANDING_FACILITIES_HEADER',
|
||||||
pParallelDegree => 2
|
pParallelDegree => 2,
|
||||||
|
pTemplateTableName => 'CT_ET_TEMPLATES.LM_STANDING_FACILITIES_HEADER',
|
||||||
|
pJobClass => 'high'
|
||||||
);
|
);
|
||||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_STANDING_FACILITY_HEADER exported');
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_STANDING_FACILITY_HEADER exported');
|
||||||
EXCEPTION
|
EXCEPTION
|
||||||
|
|||||||
@@ -25,7 +25,9 @@ BEGIN
|
|||||||
pKeyColumnName => 'A_ETL_LOAD_SET_KEY',
|
pKeyColumnName => 'A_ETL_LOAD_SET_KEY',
|
||||||
pBucketArea => 'ARCHIVE',
|
pBucketArea => 'ARCHIVE',
|
||||||
pFolderName => 'ARCHIVE/LM/LM_CURRENT_ACCOUNTS_HEADER',
|
pFolderName => 'ARCHIVE/LM/LM_CURRENT_ACCOUNTS_HEADER',
|
||||||
pParallelDegree => 2
|
pParallelDegree => 2,
|
||||||
|
pTemplateTableName => 'CT_ET_TEMPLATES.LM_CURRENT_ACCOUNTS_HEADER',
|
||||||
|
pJobClass => 'high'
|
||||||
);
|
);
|
||||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_MRR_IND_CURRENT_ACCOUNT_HEADER exported');
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_MRR_IND_CURRENT_ACCOUNT_HEADER exported');
|
||||||
EXCEPTION
|
EXCEPTION
|
||||||
@@ -45,7 +47,9 @@ BEGIN
|
|||||||
pKeyColumnName => 'A_ETL_LOAD_SET_KEY',
|
pKeyColumnName => 'A_ETL_LOAD_SET_KEY',
|
||||||
pBucketArea => 'ARCHIVE',
|
pBucketArea => 'ARCHIVE',
|
||||||
pFolderName => 'ARCHIVE/LM/LM_CURRENT_ACCOUNTS_ITEM',
|
pFolderName => 'ARCHIVE/LM/LM_CURRENT_ACCOUNTS_ITEM',
|
||||||
pParallelDegree => 16
|
pParallelDegree => 16,
|
||||||
|
pTemplateTableName => 'CT_ET_TEMPLATES.LM_CURRENT_ACCOUNTS_ITEM',
|
||||||
|
pJobClass => 'high'
|
||||||
);
|
);
|
||||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_MRR_IND_CURRENT_ACCOUNT_ITEM exported');
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_MRR_IND_CURRENT_ACCOUNT_ITEM exported');
|
||||||
EXCEPTION
|
EXCEPTION
|
||||||
|
|||||||
@@ -29,7 +29,9 @@ BEGIN
|
|||||||
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
|
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
|
||||||
pBucketArea => 'ARCHIVE',
|
pBucketArea => 'ARCHIVE',
|
||||||
pFolderName => 'ARCHIVE/LM/LM_FORECAST_HEADER',
|
pFolderName => 'ARCHIVE/LM/LM_FORECAST_HEADER',
|
||||||
pParallelDegree => 4
|
pParallelDegree => 4,
|
||||||
|
pTemplateTableName => 'CT_ET_TEMPLATES.LM_FORECAST_HEADER',
|
||||||
|
pJobClass => 'high'
|
||||||
);
|
);
|
||||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_FORECAST_HEADER exported');
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_FORECAST_HEADER exported');
|
||||||
EXCEPTION
|
EXCEPTION
|
||||||
@@ -49,7 +51,9 @@ BEGIN
|
|||||||
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
|
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
|
||||||
pBucketArea => 'ARCHIVE',
|
pBucketArea => 'ARCHIVE',
|
||||||
pFolderName => 'ARCHIVE/LM/LM_FORECAST_ITEM',
|
pFolderName => 'ARCHIVE/LM/LM_FORECAST_ITEM',
|
||||||
pParallelDegree => 16
|
pParallelDegree => 16,
|
||||||
|
pTemplateTableName => 'CT_ET_TEMPLATES.LM_FORECAST_ITEM',
|
||||||
|
pJobClass => 'high'
|
||||||
);
|
);
|
||||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_FORECAST_ITEM exported');
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_FORECAST_ITEM exported');
|
||||||
EXCEPTION
|
EXCEPTION
|
||||||
|
|||||||
@@ -24,7 +24,9 @@ BEGIN
|
|||||||
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
||||||
pBucketArea => 'ARCHIVE',
|
pBucketArea => 'ARCHIVE',
|
||||||
pFolderName => 'ARCHIVE/LM/LM_QRE_ADJUSTMENTS_HEADER',
|
pFolderName => 'ARCHIVE/LM/LM_QRE_ADJUSTMENTS_HEADER',
|
||||||
pParallelDegree => 1
|
pParallelDegree => 1,
|
||||||
|
pTemplateTableName => 'CT_ET_TEMPLATES.LM_QRE_ADJUSTMENTS_HEADER',
|
||||||
|
pJobClass => 'high'
|
||||||
);
|
);
|
||||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_QR_ADJ_HEADER exported');
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_QR_ADJ_HEADER exported');
|
||||||
EXCEPTION
|
EXCEPTION
|
||||||
@@ -44,7 +46,9 @@ BEGIN
|
|||||||
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
||||||
pBucketArea => 'ARCHIVE',
|
pBucketArea => 'ARCHIVE',
|
||||||
pFolderName => 'ARCHIVE/LM/LM_QRE_ADJUSTMENTS_ITEM',
|
pFolderName => 'ARCHIVE/LM/LM_QRE_ADJUSTMENTS_ITEM',
|
||||||
pParallelDegree => 4
|
pParallelDegree => 4,
|
||||||
|
pTemplateTableName => 'CT_ET_TEMPLATES.LM_QRE_ADJUSTMENTS_ITEM',
|
||||||
|
pJobClass => 'high'
|
||||||
);
|
);
|
||||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_QR_ADJ_ITEM exported');
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_QR_ADJ_ITEM exported');
|
||||||
EXCEPTION
|
EXCEPTION
|
||||||
@@ -64,7 +68,7 @@ BEGIN
|
|||||||
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
||||||
pBucketArea => 'ARCHIVE',
|
pBucketArea => 'ARCHIVE',
|
||||||
pFolderName => 'ARCHIVE/LM/LM_QRE_ADJUSTMENTS_ITEM_HEADER',
|
pFolderName => 'ARCHIVE/LM/LM_QRE_ADJUSTMENTS_ITEM_HEADER',
|
||||||
pParallelDegree => 2
|
pParallelDegree => 2, pTemplateTableName => 'CT_ET_TEMPLATES.LM_QRE_ADJUSTMENTS_ITEM_HEADER', pJobClass => 'high'
|
||||||
);
|
);
|
||||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_QR_ADJ_ITEM_HEADER exported');
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_QR_ADJ_ITEM_HEADER exported');
|
||||||
EXCEPTION
|
EXCEPTION
|
||||||
|
|||||||
@@ -24,7 +24,9 @@ BEGIN
|
|||||||
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
|
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
|
||||||
pBucketArea => 'ARCHIVE',
|
pBucketArea => 'ARCHIVE',
|
||||||
pFolderName => 'ARCHIVE/LM/LM_TTS_HEADER',
|
pFolderName => 'ARCHIVE/LM/LM_TTS_HEADER',
|
||||||
pParallelDegree => 1
|
pParallelDegree => 1,
|
||||||
|
pTemplateTableName => 'CT_ET_TEMPLATES.LM_TTS_HEADER',
|
||||||
|
pJobClass => 'high'
|
||||||
);
|
);
|
||||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_TTS_HEADER exported');
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_TTS_HEADER exported');
|
||||||
EXCEPTION
|
EXCEPTION
|
||||||
@@ -44,7 +46,9 @@ BEGIN
|
|||||||
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
|
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
|
||||||
pBucketArea => 'ARCHIVE',
|
pBucketArea => 'ARCHIVE',
|
||||||
pFolderName => 'ARCHIVE/LM/LM_TTS_ITEM',
|
pFolderName => 'ARCHIVE/LM/LM_TTS_ITEM',
|
||||||
pParallelDegree => 1
|
pParallelDegree => 1,
|
||||||
|
pTemplateTableName => 'CT_ET_TEMPLATES.LM_TTS_ITEM',
|
||||||
|
pJobClass => 'high'
|
||||||
);
|
);
|
||||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_TTS_ITEM exported');
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_TTS_ITEM exported');
|
||||||
EXCEPTION
|
EXCEPTION
|
||||||
|
|||||||
@@ -11,8 +11,8 @@ PROMPT ========================================
|
|||||||
ALTER TABLE CT_MRDS.A_SOURCE_FILE_CONFIG ADD (
|
ALTER TABLE CT_MRDS.A_SOURCE_FILE_CONFIG ADD (
|
||||||
ARCHIVAL_STRATEGY VARCHAR2(30) DEFAULT 'THRESHOLD_BASED' NOT NULL,
|
ARCHIVAL_STRATEGY VARCHAR2(30) DEFAULT 'THRESHOLD_BASED' NOT NULL,
|
||||||
MINIMUM_AGE_MONTHS NUMBER(3) DEFAULT NULL,
|
MINIMUM_AGE_MONTHS NUMBER(3) DEFAULT NULL,
|
||||||
ARCHIVE_ENABLED CHAR(1) DEFAULT 'N' NOT NULL,
|
IS_ARCHIVE_ENABLED CHAR(1) DEFAULT 'N' NOT NULL,
|
||||||
KEEP_IN_TRASH CHAR(1) DEFAULT 'Y' NOT NULL
|
IS_KEEP_IN_TRASH CHAR(1) DEFAULT 'Y' NOT NULL
|
||||||
);
|
);
|
||||||
|
|
||||||
-- Add check constraints
|
-- Add check constraints
|
||||||
@@ -22,10 +22,10 @@ ALTER TABLE CT_MRDS.A_SOURCE_FILE_CONFIG ADD CONSTRAINT
|
|||||||
);
|
);
|
||||||
|
|
||||||
ALTER TABLE CT_MRDS.A_SOURCE_FILE_CONFIG ADD CONSTRAINT
|
ALTER TABLE CT_MRDS.A_SOURCE_FILE_CONFIG ADD CONSTRAINT
|
||||||
CHK_ARCHIVE_ENABLED CHECK (ARCHIVE_ENABLED IN ('Y', 'N'));
|
CHK_IS_ARCHIVE_ENABLED CHECK (IS_ARCHIVE_ENABLED IN ('Y', 'N'));
|
||||||
|
|
||||||
ALTER TABLE CT_MRDS.A_SOURCE_FILE_CONFIG ADD CONSTRAINT
|
ALTER TABLE CT_MRDS.A_SOURCE_FILE_CONFIG ADD CONSTRAINT
|
||||||
CHK_KEEP_IN_TRASH CHECK (KEEP_IN_TRASH IN ('Y', 'N'));
|
CHK_IS_KEEP_IN_TRASH CHECK (IS_KEEP_IN_TRASH IN ('Y', 'N'));
|
||||||
|
|
||||||
-- Add comments
|
-- Add comments
|
||||||
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ARCHIVAL_STRATEGY IS
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ARCHIVAL_STRATEGY IS
|
||||||
@@ -34,10 +34,10 @@ COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ARCHIVAL_STRATEGY IS
|
|||||||
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.MINIMUM_AGE_MONTHS IS
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.MINIMUM_AGE_MONTHS IS
|
||||||
'Minimum age in months for archival (used with MINIMUM_AGE_MONTHS or HYBRID strategies)';
|
'Minimum age in months for archival (used with MINIMUM_AGE_MONTHS or HYBRID strategies)';
|
||||||
|
|
||||||
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ARCHIVE_ENABLED IS
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.IS_ARCHIVE_ENABLED IS
|
||||||
'Y=Enable archiving, N=Skip archiving. Controls if table participates in archival process. Added in MARS-828 v3.3.0';
|
'Y=Enable archiving, N=Skip archiving. Controls if table participates in archival process. Added in MARS-828 v3.3.0';
|
||||||
|
|
||||||
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.KEEP_IN_TRASH IS
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.IS_KEEP_IN_TRASH IS
|
||||||
'Y=Keep files in TRASH after archiving, N=Delete immediately. Controls TRASH retention policy. Added in MARS-828 v3.3.0';
|
'Y=Keep files in TRASH after archiving, N=Delete immediately. Controls TRASH retention policy. Added in MARS-828 v3.3.0';
|
||||||
|
|
||||||
-- Verify columns added
|
-- Verify columns added
|
||||||
@@ -50,7 +50,7 @@ SELECT
|
|||||||
FROM all_tab_columns
|
FROM all_tab_columns
|
||||||
WHERE owner = 'CT_MRDS'
|
WHERE owner = 'CT_MRDS'
|
||||||
AND table_name = 'A_SOURCE_FILE_CONFIG'
|
AND table_name = 'A_SOURCE_FILE_CONFIG'
|
||||||
AND column_name IN ('ARCHIVAL_STRATEGY', 'MINIMUM_AGE_MONTHS', 'ARCHIVE_ENABLED', 'KEEP_IN_TRASH')
|
AND column_name IN ('ARCHIVAL_STRATEGY', 'MINIMUM_AGE_MONTHS', 'IS_ARCHIVE_ENABLED', 'IS_KEEP_IN_TRASH')
|
||||||
ORDER BY column_id;
|
ORDER BY column_id;
|
||||||
|
|
||||||
PROMPT ========================================
|
PROMPT ========================================
|
||||||
|
|||||||
@@ -0,0 +1,49 @@
|
|||||||
|
-- MARS-828: Rename threshold columns for consistency
|
||||||
|
-- Author: Grzegorz Michalski
|
||||||
|
-- Date: 2026-01-28
|
||||||
|
-- Description: Renames threshold columns to use consistent ARCHIVE_THRESHOLD_* prefix pattern
|
||||||
|
-- Old naming was inconsistent (DAYS_FOR vs FILES_COUNT_OVER)
|
||||||
|
-- New naming groups all threshold columns with common prefix
|
||||||
|
|
||||||
|
PROMPT ========================================
|
||||||
|
PROMPT MARS-828: Renaming threshold columns for consistency
|
||||||
|
PROMPT ========================================
|
||||||
|
|
||||||
|
-- Rename threshold columns to consistent ARCHIVE_THRESHOLD_* pattern
|
||||||
|
ALTER TABLE CT_MRDS.A_SOURCE_FILE_CONFIG
|
||||||
|
RENAME COLUMN DAYS_FOR_ARCHIVE_THRESHOLD TO ARCHIVE_THRESHOLD_DAYS;
|
||||||
|
|
||||||
|
ALTER TABLE CT_MRDS.A_SOURCE_FILE_CONFIG
|
||||||
|
RENAME COLUMN FILES_COUNT_OVER_ARCHIVE_THRESHOLD TO ARCHIVE_THRESHOLD_FILES_COUNT;
|
||||||
|
|
||||||
|
ALTER TABLE CT_MRDS.A_SOURCE_FILE_CONFIG
|
||||||
|
RENAME COLUMN BYTES_SUM_OVER_ARCHIVE_THRESHOLD TO ARCHIVE_THRESHOLD_BYTES_SUM;
|
||||||
|
|
||||||
|
ALTER TABLE CT_MRDS.A_SOURCE_FILE_CONFIG
|
||||||
|
RENAME COLUMN ROWS_COUNT_OVER_ARCHIVE_THRESHOLD TO ARCHIVE_THRESHOLD_ROWS_COUNT;
|
||||||
|
|
||||||
|
-- Verify column renames
|
||||||
|
PROMPT ========================================
|
||||||
|
PROMPT Verifying threshold column renames...
|
||||||
|
PROMPT ========================================
|
||||||
|
|
||||||
|
SELECT
|
||||||
|
column_name,
|
||||||
|
data_type,
|
||||||
|
data_length
|
||||||
|
FROM all_tab_columns
|
||||||
|
WHERE owner = 'CT_MRDS'
|
||||||
|
AND table_name = 'A_SOURCE_FILE_CONFIG'
|
||||||
|
AND column_name LIKE 'ARCHIVE_THRESHOLD%'
|
||||||
|
ORDER BY column_id;
|
||||||
|
|
||||||
|
PROMPT ========================================
|
||||||
|
PROMPT Expected columns:
|
||||||
|
PROMPT ARCHIVE_THRESHOLD_DAYS
|
||||||
|
PROMPT ARCHIVE_THRESHOLD_FILES_COUNT
|
||||||
|
PROMPT ARCHIVE_THRESHOLD_BYTES_SUM
|
||||||
|
PROMPT ARCHIVE_THRESHOLD_ROWS_COUNT
|
||||||
|
PROMPT ========================================
|
||||||
|
|
||||||
|
PROMPT Threshold columns renamed successfully
|
||||||
|
PROMPT ========================================
|
||||||
@@ -0,0 +1,160 @@
|
|||||||
|
-- =====================================================================
|
||||||
|
-- Script: 01b_MARS_828_add_column_comments.sql
|
||||||
|
-- MARS Issue: MARS-828
|
||||||
|
-- Author: Grzegorz Michalski
|
||||||
|
-- Date: 2026-02-20
|
||||||
|
-- Purpose: Add comprehensive column comments for A_SOURCE_FILE_CONFIG and A_SOURCE_FILE_RECEIVED tables
|
||||||
|
-- Description: Documents all columns to improve database maintainability and user understanding
|
||||||
|
-- =====================================================================
|
||||||
|
|
||||||
|
PROMPT ========================================
|
||||||
|
PROMPT MARS-828: Adding comprehensive column comments
|
||||||
|
PROMPT ========================================
|
||||||
|
|
||||||
|
-- =====================================================================
|
||||||
|
-- A_SOURCE_FILE_CONFIG Column Comments
|
||||||
|
-- =====================================================================
|
||||||
|
|
||||||
|
PROMPT Adding column comments for A_SOURCE_FILE_CONFIG...
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.A_SOURCE_FILE_CONFIG_KEY IS
|
||||||
|
'Primary key - unique identifier for source file configuration record';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.A_SOURCE_KEY IS
|
||||||
|
'Foreign key to A_SOURCE table - identifies the source system (e.g., LM, C2D, CSDB)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.SOURCE_FILE_TYPE IS
|
||||||
|
'Type of file configuration: INPUT (data files), CONTAINER (xml files), or LOAD_CONFIG (configuration files)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.SOURCE_FILE_ID IS
|
||||||
|
'Unique identifier for the source file within the source system (e.g., UC_DISSEM, STANDING_FACILITIES)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.SOURCE_FILE_DESC IS
|
||||||
|
'Human-readable description of the source file and its purpose';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.SOURCE_FILE_NAME_PATTERN IS
|
||||||
|
'Filename pattern for matching incoming files (supports wildcards, e.g., UC_NMA_DISSEM-*.csv)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.TABLE_ID IS
|
||||||
|
'Identifier for the target table where data will be loaded (without schema prefix)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.TEMPLATE_TABLE_NAME IS
|
||||||
|
'Fully qualified name of template table in CT_ET_TEMPLATES schema used for external table creation';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.CONTAINER_FILE_KEY IS
|
||||||
|
'Foreign key to parent container configuration when this file is part of an xml (NULL for standalone files)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ARCHIVE_THRESHOLD_DAYS IS
|
||||||
|
'Threshold for THRESHOLD_BASED strategy: archive data older than N days';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ARCHIVE_THRESHOLD_FILES_COUNT IS
|
||||||
|
'Trigger archival when file count exceeds this threshold (used in THRESHOLD_BASED and HYBRID strategies)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ARCHIVE_THRESHOLD_BYTES_SUM IS
|
||||||
|
'Trigger archival when total size in bytes exceeds this threshold (used in THRESHOLD_BASED and HYBRID strategies)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ARCHIVE_THRESHOLD_ROWS_COUNT IS
|
||||||
|
'Trigger archival when total row count exceeds this threshold (used in THRESHOLD_BASED and HYBRID strategies)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ODS_SCHEMA_NAME IS
|
||||||
|
'Schema name where ODS external tables are created (typically ODS)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.HOURS_TO_EXPIRE_STATISTICS IS
|
||||||
|
'Number of hours before table statistics expire and need to be recalculated';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ARCHIVAL_STRATEGY IS
|
||||||
|
'Archival strategy: THRESHOLD_BASED (days-based), MINIMUM_AGE_MONTHS (0=current month, N=retain N months), HYBRID (combination)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.MINIMUM_AGE_MONTHS IS
|
||||||
|
'Minimum age in months before archival (required for MINIMUM_AGE_MONTHS and HYBRID strategies, 0=current month only)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ENCODING IS
|
||||||
|
'Oracle character set name for CSV files (e.g., UTF8, WE8MSWIN1252, EE8ISO8859P2)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.IS_ARCHIVE_ENABLED IS
|
||||||
|
'Y=Enable archiving, N=Skip archiving. Controls if table participates in archival process';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.IS_KEEP_IN_TRASH IS
|
||||||
|
'Y=Keep files in TRASH after archiving, N=Delete immediately. Controls TRASH retention policy';
|
||||||
|
|
||||||
|
-- =====================================================================
|
||||||
|
-- A_SOURCE_FILE_RECEIVED Column Comments
|
||||||
|
-- =====================================================================
|
||||||
|
|
||||||
|
PROMPT Adding column comments for A_SOURCE_FILE_RECEIVED...
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY IS
|
||||||
|
'Primary key - unique identifier for received file record';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_CONFIG_KEY IS
|
||||||
|
'Foreign key to A_SOURCE_FILE_CONFIG - links file to its configuration and processing rules';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.SOURCE_FILE_NAME IS
|
||||||
|
'Full object name/path of the received file in OCI Object Storage (includes INBOX prefix)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.CHECKSUM IS
|
||||||
|
'MD5 checksum of file content for integrity verification and duplicate detection';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.CREATED IS
|
||||||
|
'Timestamp with timezone when file was created/uploaded to Object Storage (from DBMS_CLOUD.LIST_OBJECTS)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.BYTES IS
|
||||||
|
'File size in bytes';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.RECEPTION_DATE IS
|
||||||
|
'Date when file was registered in the system (extracted from CREATED timestamp)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.PROCESSING_STATUS IS
|
||||||
|
'Current processing status: RECEIVED → VALIDATED → READY_FOR_INGESTION → INGESTED → ARCHIVED_AND_TRASHED → ARCHIVED_AND_PURGED';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.EXTERNAL_TABLE_NAME IS
|
||||||
|
'Name of temporary external table created for file validation (dropped after validation)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.PARTITION_YEAR IS
|
||||||
|
'Year partition value (YYYY format) when file was archived to ARCHIVE bucket with Hive-style partitioning';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.PARTITION_MONTH IS
|
||||||
|
'Month partition value (MM format) when file was archived to ARCHIVE bucket with Hive-style partitioning';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.ARCH_PATH IS
|
||||||
|
'Archive directory prefix in ARCHIVE bucket containing archived Parquet files (supports multiple files from parallel DBMS_CLOUD.EXPORT_DATA)';
|
||||||
|
|
||||||
|
-- =====================================================================
|
||||||
|
-- Verification
|
||||||
|
-- =====================================================================
|
||||||
|
|
||||||
|
PROMPT
|
||||||
|
PROMPT Verifying column comments...
|
||||||
|
PROMPT
|
||||||
|
|
||||||
|
SELECT
|
||||||
|
table_name,
|
||||||
|
COUNT(*) as total_columns,
|
||||||
|
COUNT(comments) as documented_columns,
|
||||||
|
COUNT(*) - COUNT(comments) as undocumented_columns
|
||||||
|
FROM all_col_comments
|
||||||
|
WHERE owner = 'CT_MRDS'
|
||||||
|
AND table_name IN ('A_SOURCE_FILE_CONFIG', 'A_SOURCE_FILE_RECEIVED')
|
||||||
|
GROUP BY table_name
|
||||||
|
ORDER BY table_name;
|
||||||
|
|
||||||
|
PROMPT
|
||||||
|
PROMPT Detailed column documentation status:
|
||||||
|
PROMPT
|
||||||
|
|
||||||
|
SELECT
|
||||||
|
table_name,
|
||||||
|
column_name,
|
||||||
|
CASE WHEN comments IS NULL THEN 'MISSING' ELSE 'OK' END as comment_status
|
||||||
|
FROM all_col_comments
|
||||||
|
WHERE owner = 'CT_MRDS'
|
||||||
|
AND table_name IN ('A_SOURCE_FILE_CONFIG', 'A_SOURCE_FILE_RECEIVED')
|
||||||
|
ORDER BY table_name, column_name;
|
||||||
|
|
||||||
|
PROMPT
|
||||||
|
PROMPT ========================================
|
||||||
|
PROMPT Column comments added successfully
|
||||||
|
PROMPT ========================================
|
||||||
|
PROMPT A_SOURCE_FILE_CONFIG: All 20 columns documented
|
||||||
|
PROMPT A_SOURCE_FILE_RECEIVED: All 12 columns documented
|
||||||
|
PROMPT ========================================
|
||||||
@@ -59,9 +59,23 @@ WHERE owner = 'CT_MRDS'
|
|||||||
AND object_type IN ('PACKAGE', 'PACKAGE BODY')
|
AND object_type IN ('PACKAGE', 'PACKAGE BODY')
|
||||||
ORDER BY object_type;
|
ORDER BY object_type;
|
||||||
|
|
||||||
-- 5. Check for compilation errors
|
-- 5. Check FILE_MANAGER package compilation status
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT 5. Checking for compilation errors...
|
PROMPT 5. Checking FILE_MANAGER package status...
|
||||||
|
SELECT
|
||||||
|
object_name,
|
||||||
|
object_type,
|
||||||
|
status,
|
||||||
|
TO_CHAR(last_ddl_time, 'YYYY-MM-DD HH24:MI:SS') as last_ddl_time
|
||||||
|
FROM all_objects
|
||||||
|
WHERE owner = 'CT_MRDS'
|
||||||
|
AND object_name = 'FILE_MANAGER'
|
||||||
|
AND object_type IN ('PACKAGE', 'PACKAGE BODY')
|
||||||
|
ORDER BY object_type;
|
||||||
|
|
||||||
|
-- 6. Check for compilation errors
|
||||||
|
PROMPT
|
||||||
|
PROMPT 6. Checking for compilation errors (FILE_ARCHIVER)...
|
||||||
SELECT
|
SELECT
|
||||||
name,
|
name,
|
||||||
type,
|
type,
|
||||||
@@ -73,14 +87,33 @@ WHERE owner = 'CT_MRDS'
|
|||||||
AND name = 'FILE_ARCHIVER'
|
AND name = 'FILE_ARCHIVER'
|
||||||
ORDER BY type, sequence;
|
ORDER BY type, sequence;
|
||||||
|
|
||||||
-- 6. Verify package version
|
-- 7. Check for compilation errors (FILE_MANAGER)
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT 6. Verifying FILE_ARCHIVER version...
|
PROMPT 7. Checking for compilation errors (FILE_MANAGER)...
|
||||||
SELECT CT_MRDS.FILE_ARCHIVER.GET_VERSION() as package_version FROM DUAL;
|
SELECT
|
||||||
|
name,
|
||||||
|
type,
|
||||||
|
line,
|
||||||
|
position,
|
||||||
|
text
|
||||||
|
FROM all_errors
|
||||||
|
WHERE owner = 'CT_MRDS'
|
||||||
|
AND name = 'FILE_MANAGER'
|
||||||
|
ORDER BY type, sequence;
|
||||||
|
|
||||||
-- 7. Test trigger validation
|
-- 8. Verify package versions
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT 7. Testing trigger validation (should fail)...
|
PROMPT 8. Verifying package versions...
|
||||||
|
PROMPT FILE_ARCHIVER version:
|
||||||
|
SELECT CT_MRDS.FILE_ARCHIVER.GET_VERSION() as package_version FROM DUAL;
|
||||||
|
PROMPT FILE_MANAGER version:
|
||||||
|
SELECT CT_MRDS.FILE_MANAGER.GET_VERSION() as package_version FROM DUAL;
|
||||||
|
|
||||||
|
-- 9. Test trigger validation
|
||||||
|
PROMPT
|
||||||
|
PROMPT 9. Testing trigger validation (should fail)...
|
||||||
|
WHENEVER SQLERROR CONTINUE
|
||||||
|
SET SERVEROUTPUT ON
|
||||||
DECLARE
|
DECLARE
|
||||||
vTestPassed BOOLEAN := FALSE;
|
vTestPassed BOOLEAN := FALSE;
|
||||||
BEGIN
|
BEGIN
|
||||||
@@ -107,14 +140,16 @@ EXCEPTION
|
|||||||
WHEN OTHERS THEN
|
WHEN OTHERS THEN
|
||||||
IF SQLCODE = -20999 THEN
|
IF SQLCODE = -20999 THEN
|
||||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: Trigger validation working correctly');
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: Trigger validation working correctly');
|
||||||
DBMS_OUTPUT.PUT_LINE('Expected error: ' || SQLERRM);
|
DBMS_OUTPUT.PUT_LINE('Trigger correctly rejected MINIMUM_AGE_MONTHS strategy without required value');
|
||||||
vTestPassed := TRUE;
|
vTestPassed := TRUE;
|
||||||
ELSE
|
ELSE
|
||||||
DBMS_OUTPUT.PUT_LINE('ERROR: Unexpected error: ' || SQLERRM);
|
DBMS_OUTPUT.PUT_LINE('ERROR: Unexpected error occurred during trigger validation');
|
||||||
|
DBMS_OUTPUT.PUT_LINE('Error code: ' || SQLCODE);
|
||||||
END IF;
|
END IF;
|
||||||
ROLLBACK;
|
ROLLBACK;
|
||||||
END;
|
END;
|
||||||
/
|
/
|
||||||
|
WHENEVER SQLERROR EXIT FAILURE
|
||||||
|
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT ========================================
|
PROMPT ========================================
|
||||||
|
|||||||
@@ -13,7 +13,7 @@
|
|||||||
--
|
--
|
||||||
-- Configuration by group:
|
-- Configuration by group:
|
||||||
-- - 19 LM tables: MINIMUM_AGE_MONTHS=0 (current month only), 10 files OR 100K rows OR 1GB, 24h stats
|
-- - 19 LM tables: MINIMUM_AGE_MONTHS=0 (current month only), 10 files OR 100K rows OR 1GB, 24h stats
|
||||||
-- - 2 CSDB DEBT: MINIMUM_AGE_MONTHS=6, 5 files OR 50K rows OR 512MB, 48h stats
|
-- - 2 CSDB DEBT: MINIMUM_AGE_MONTHS=0 (current month only), 5 files OR 50K rows OR 512MB, 48h stats
|
||||||
-- - 4 CSDB ratings: MINIMUM_AGE_MONTHS=0 (current month only), 10 files OR 20K rows OR 256MB, 72h stats
|
-- - 4 CSDB ratings: MINIMUM_AGE_MONTHS=0 (current month only), 10 files OR 20K rows OR 256MB, 72h stats
|
||||||
--
|
--
|
||||||
-- Dependencies:
|
-- Dependencies:
|
||||||
@@ -33,7 +33,7 @@ PROMPT - Triggers: 10 files OR 100,000 rows OR 1 GB
|
|||||||
PROMPT - Stats Expiration: 24 hours
|
PROMPT - Stats Expiration: 24 hours
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT CSDB DEBT Tables (2):
|
PROMPT CSDB DEBT Tables (2):
|
||||||
PROMPT - Strategy: MINIMUM_AGE_MONTHS = 6
|
PROMPT - Strategy: MINIMUM_AGE_MONTHS = 0 (current month only)
|
||||||
PROMPT - Triggers: 5 files OR 50,000 rows OR 512 MB
|
PROMPT - Triggers: 5 files OR 50,000 rows OR 512 MB
|
||||||
PROMPT - Stats Expiration: 48 hours
|
PROMPT - Stats Expiration: 48 hours
|
||||||
PROMPT
|
PROMPT
|
||||||
@@ -57,12 +57,12 @@ UPDATE CT_MRDS.A_SOURCE_FILE_CONFIG
|
|||||||
SET ARCHIVAL_STRATEGY = 'MINIMUM_AGE_MONTHS',
|
SET ARCHIVAL_STRATEGY = 'MINIMUM_AGE_MONTHS',
|
||||||
MINIMUM_AGE_MONTHS = 0, -- 0 = current month only
|
MINIMUM_AGE_MONTHS = 0, -- 0 = current month only
|
||||||
ODS_SCHEMA_NAME = 'ODS',
|
ODS_SCHEMA_NAME = 'ODS',
|
||||||
FILES_COUNT_OVER_ARCHIVE_THRESHOLD = 10,
|
ARCHIVE_THRESHOLD_FILES_COUNT = 10,
|
||||||
ROWS_COUNT_OVER_ARCHIVE_THRESHOLD = 100000,
|
ARCHIVE_THRESHOLD_ROWS_COUNT = 100000,
|
||||||
BYTES_SUM_OVER_ARCHIVE_THRESHOLD = 1073741824, -- 1 GB
|
ARCHIVE_THRESHOLD_BYTES_SUM = 1073741824, -- 1 GB
|
||||||
HOURS_TO_EXPIRE_STATISTICS = 24,
|
HOURS_TO_EXPIRE_STATISTICS = 24,
|
||||||
ARCHIVE_ENABLED = 'Y', -- Enable archival for all LM tables
|
IS_ARCHIVE_ENABLED = 'Y', -- Enable archival for all LM tables
|
||||||
KEEP_IN_TRASH = 'N' -- Delete files immediately after archival (no TRASH retention)
|
IS_KEEP_IN_TRASH = 'N' -- Delete files immediately after archival (no TRASH retention)
|
||||||
WHERE SOURCE_FILE_TYPE = 'INPUT'
|
WHERE SOURCE_FILE_TYPE = 'INPUT'
|
||||||
AND A_SOURCE_KEY = 'LM'
|
AND A_SOURCE_KEY = 'LM'
|
||||||
AND TABLE_ID IN (
|
AND TABLE_ID IN (
|
||||||
@@ -92,23 +92,23 @@ PROMPT LM tables configuration completed
|
|||||||
PROMPT
|
PROMPT
|
||||||
|
|
||||||
PROMPT =====================================================================
|
PROMPT =====================================================================
|
||||||
PROMPT SECTION 2: CSDB DEBT Tables (MINIMUM_AGE_MONTHS = 6)
|
PROMPT SECTION 2: CSDB DEBT Tables (MINIMUM_AGE_MONTHS = 0)
|
||||||
PROMPT =====================================================================
|
PROMPT =====================================================================
|
||||||
PROMPT Thresholds: 5 files OR 50K rows OR 512MB
|
PROMPT Thresholds: 5 files OR 50K rows OR 512MB
|
||||||
PROMPT Stats expire: 48 hours
|
PROMPT Stats expire: 48 hours
|
||||||
PROMPT =====================================================================
|
PROMPT =====================================================================
|
||||||
|
|
||||||
-- Update CSDB DEBT tables (6-month retention)
|
-- Update CSDB DEBT tables (current month only)
|
||||||
UPDATE CT_MRDS.A_SOURCE_FILE_CONFIG
|
UPDATE CT_MRDS.A_SOURCE_FILE_CONFIG
|
||||||
SET ARCHIVAL_STRATEGY = 'MINIMUM_AGE_MONTHS',
|
SET ARCHIVAL_STRATEGY = 'MINIMUM_AGE_MONTHS',
|
||||||
MINIMUM_AGE_MONTHS = 6,
|
MINIMUM_AGE_MONTHS = 0,
|
||||||
ODS_SCHEMA_NAME = 'ODS',
|
ODS_SCHEMA_NAME = 'ODS',
|
||||||
FILES_COUNT_OVER_ARCHIVE_THRESHOLD = 5,
|
ARCHIVE_THRESHOLD_FILES_COUNT = 5,
|
||||||
ROWS_COUNT_OVER_ARCHIVE_THRESHOLD = 50000,
|
ARCHIVE_THRESHOLD_ROWS_COUNT = 50000,
|
||||||
BYTES_SUM_OVER_ARCHIVE_THRESHOLD = 536870912, -- 512 MB
|
ARCHIVE_THRESHOLD_BYTES_SUM = 536870912, -- 512 MB
|
||||||
HOURS_TO_EXPIRE_STATISTICS = 48,
|
HOURS_TO_EXPIRE_STATISTICS = 48,
|
||||||
ARCHIVE_ENABLED = 'Y', -- Enable archival for CSDB DEBT tables
|
IS_ARCHIVE_ENABLED = 'Y', -- Enable archival for CSDB DEBT tables
|
||||||
KEEP_IN_TRASH = 'N' -- Delete files immediately after archival (no TRASH retention)
|
IS_KEEP_IN_TRASH = 'N' -- Delete files immediately after archival (no TRASH retention)
|
||||||
WHERE SOURCE_FILE_TYPE = 'INPUT'
|
WHERE SOURCE_FILE_TYPE = 'INPUT'
|
||||||
AND A_SOURCE_KEY = 'CSDB'
|
AND A_SOURCE_KEY = 'CSDB'
|
||||||
AND TABLE_ID IN ('CSDB_DEBT', 'CSDB_DEBT_DAILY');
|
AND TABLE_ID IN ('CSDB_DEBT', 'CSDB_DEBT_DAILY');
|
||||||
@@ -129,12 +129,12 @@ UPDATE CT_MRDS.A_SOURCE_FILE_CONFIG
|
|||||||
SET ARCHIVAL_STRATEGY = 'MINIMUM_AGE_MONTHS',
|
SET ARCHIVAL_STRATEGY = 'MINIMUM_AGE_MONTHS',
|
||||||
MINIMUM_AGE_MONTHS = 0, -- 0 = current month only
|
MINIMUM_AGE_MONTHS = 0, -- 0 = current month only
|
||||||
ODS_SCHEMA_NAME = 'ODS',
|
ODS_SCHEMA_NAME = 'ODS',
|
||||||
FILES_COUNT_OVER_ARCHIVE_THRESHOLD = 10,
|
ARCHIVE_THRESHOLD_FILES_COUNT = 10,
|
||||||
ROWS_COUNT_OVER_ARCHIVE_THRESHOLD = 20000,
|
ARCHIVE_THRESHOLD_ROWS_COUNT = 20000,
|
||||||
BYTES_SUM_OVER_ARCHIVE_THRESHOLD = 268435456, -- 256 MB
|
ARCHIVE_THRESHOLD_BYTES_SUM = 268435456, -- 256 MB
|
||||||
HOURS_TO_EXPIRE_STATISTICS = 72,
|
HOURS_TO_EXPIRE_STATISTICS = 72,
|
||||||
ARCHIVE_ENABLED = 'Y', -- Enable archival for CSDB rating/description tables
|
IS_ARCHIVE_ENABLED = 'Y', -- Enable archival for CSDB rating/description tables
|
||||||
KEEP_IN_TRASH = 'N' -- Delete files immediately after archival (no TRASH retention)
|
IS_KEEP_IN_TRASH = 'N' -- Delete files immediately after archival (no TRASH retention)
|
||||||
WHERE SOURCE_FILE_TYPE = 'INPUT'
|
WHERE SOURCE_FILE_TYPE = 'INPUT'
|
||||||
AND A_SOURCE_KEY = 'CSDB'
|
AND A_SOURCE_KEY = 'CSDB'
|
||||||
AND TABLE_ID IN (
|
AND TABLE_ID IN (
|
||||||
@@ -170,21 +170,21 @@ SELECT
|
|||||||
TABLE_ID,
|
TABLE_ID,
|
||||||
ARCHIVAL_STRATEGY,
|
ARCHIVAL_STRATEGY,
|
||||||
MINIMUM_AGE_MONTHS,
|
MINIMUM_AGE_MONTHS,
|
||||||
FILES_COUNT_OVER_ARCHIVE_THRESHOLD AS FILE_THR,
|
ARCHIVE_THRESHOLD_FILES_COUNT AS FILE_THR,
|
||||||
ROWS_COUNT_OVER_ARCHIVE_THRESHOLD AS ROW_THR,
|
ARCHIVE_THRESHOLD_ROWS_COUNT AS ROW_THR,
|
||||||
BYTES_SUM_OVER_ARCHIVE_THRESHOLD AS BYTE_THR,
|
ARCHIVE_THRESHOLD_BYTES_SUM AS BYTE_THR,
|
||||||
HOURS_TO_EXPIRE_STATISTICS AS STATS_HRS,
|
HOURS_TO_EXPIRE_STATISTICS AS STATS_HRS,
|
||||||
ARCHIVE_ENABLED,
|
IS_ARCHIVE_ENABLED,
|
||||||
KEEP_IN_TRASH,
|
IS_KEEP_IN_TRASH,
|
||||||
CASE
|
CASE
|
||||||
WHEN ARCHIVAL_STRATEGY = 'MINIMUM_AGE_MONTHS'
|
WHEN ARCHIVAL_STRATEGY = 'MINIMUM_AGE_MONTHS'
|
||||||
AND MINIMUM_AGE_MONTHS = 0
|
AND MINIMUM_AGE_MONTHS = 0
|
||||||
AND FILES_COUNT_OVER_ARCHIVE_THRESHOLD = 10
|
AND ARCHIVE_THRESHOLD_FILES_COUNT = 10
|
||||||
AND ROWS_COUNT_OVER_ARCHIVE_THRESHOLD = 100000
|
AND ARCHIVE_THRESHOLD_ROWS_COUNT = 100000
|
||||||
AND BYTES_SUM_OVER_ARCHIVE_THRESHOLD = 1073741824
|
AND ARCHIVE_THRESHOLD_BYTES_SUM = 1073741824
|
||||||
AND HOURS_TO_EXPIRE_STATISTICS = 24
|
AND HOURS_TO_EXPIRE_STATISTICS = 24
|
||||||
AND ARCHIVE_ENABLED = 'Y'
|
AND IS_ARCHIVE_ENABLED = 'Y'
|
||||||
AND KEEP_IN_TRASH = 'N'
|
AND IS_KEEP_IN_TRASH = 'N'
|
||||||
THEN 'OK'
|
THEN 'OK'
|
||||||
ELSE 'ERROR'
|
ELSE 'ERROR'
|
||||||
END AS STATUS
|
END AS STATUS
|
||||||
@@ -195,28 +195,28 @@ WHERE A_SOURCE_KEY = 'LM'
|
|||||||
ORDER BY TABLE_ID;
|
ORDER BY TABLE_ID;
|
||||||
|
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT CSDB DEBT Tables (MINIMUM_AGE_MONTHS = 6):
|
PROMPT CSDB DEBT Tables (MINIMUM_AGE_MONTHS = 0):
|
||||||
PROMPT
|
PROMPT
|
||||||
|
|
||||||
SELECT
|
SELECT
|
||||||
TABLE_ID,
|
TABLE_ID,
|
||||||
ARCHIVAL_STRATEGY,
|
ARCHIVAL_STRATEGY,
|
||||||
MINIMUM_AGE_MONTHS,
|
MINIMUM_AGE_MONTHS,
|
||||||
FILES_COUNT_OVER_ARCHIVE_THRESHOLD AS FILE_THR,
|
ARCHIVE_THRESHOLD_FILES_COUNT AS FILE_THR,
|
||||||
ROWS_COUNT_OVER_ARCHIVE_THRESHOLD AS ROW_THR,
|
ARCHIVE_THRESHOLD_ROWS_COUNT AS ROW_THR,
|
||||||
BYTES_SUM_OVER_ARCHIVE_THRESHOLD AS BYTE_THR,
|
ARCHIVE_THRESHOLD_BYTES_SUM AS BYTE_THR,
|
||||||
HOURS_TO_EXPIRE_STATISTICS AS STATS_HRS,
|
HOURS_TO_EXPIRE_STATISTICS AS STATS_HRS,
|
||||||
ARCHIVE_ENABLED,
|
IS_ARCHIVE_ENABLED,
|
||||||
KEEP_IN_TRASH,
|
IS_KEEP_IN_TRASH,
|
||||||
CASE
|
CASE
|
||||||
WHEN ARCHIVAL_STRATEGY = 'MINIMUM_AGE_MONTHS'
|
WHEN ARCHIVAL_STRATEGY = 'MINIMUM_AGE_MONTHS'
|
||||||
AND MINIMUM_AGE_MONTHS = 6
|
AND MINIMUM_AGE_MONTHS = 0
|
||||||
AND FILES_COUNT_OVER_ARCHIVE_THRESHOLD = 5
|
AND ARCHIVE_THRESHOLD_FILES_COUNT = 5
|
||||||
AND ROWS_COUNT_OVER_ARCHIVE_THRESHOLD = 50000
|
AND ARCHIVE_THRESHOLD_ROWS_COUNT = 50000
|
||||||
AND BYTES_SUM_OVER_ARCHIVE_THRESHOLD = 536870912
|
AND ARCHIVE_THRESHOLD_BYTES_SUM = 536870912
|
||||||
AND HOURS_TO_EXPIRE_STATISTICS = 48
|
AND HOURS_TO_EXPIRE_STATISTICS = 48
|
||||||
AND ARCHIVE_ENABLED = 'Y'
|
AND IS_ARCHIVE_ENABLED = 'Y'
|
||||||
AND KEEP_IN_TRASH = 'N'
|
AND IS_KEEP_IN_TRASH = 'N'
|
||||||
THEN 'OK'
|
THEN 'OK'
|
||||||
ELSE 'ERROR'
|
ELSE 'ERROR'
|
||||||
END AS STATUS
|
END AS STATUS
|
||||||
@@ -234,21 +234,21 @@ SELECT
|
|||||||
TABLE_ID,
|
TABLE_ID,
|
||||||
ARCHIVAL_STRATEGY,
|
ARCHIVAL_STRATEGY,
|
||||||
MINIMUM_AGE_MONTHS,
|
MINIMUM_AGE_MONTHS,
|
||||||
FILES_COUNT_OVER_ARCHIVE_THRESHOLD AS FILE_THR,
|
ARCHIVE_THRESHOLD_FILES_COUNT AS FILE_THR,
|
||||||
ROWS_COUNT_OVER_ARCHIVE_THRESHOLD AS ROW_THR,
|
ARCHIVE_THRESHOLD_ROWS_COUNT AS ROW_THR,
|
||||||
BYTES_SUM_OVER_ARCHIVE_THRESHOLD AS BYTE_THR,
|
ARCHIVE_THRESHOLD_BYTES_SUM AS BYTE_THR,
|
||||||
HOURS_TO_EXPIRE_STATISTICS AS STATS_HRS,
|
HOURS_TO_EXPIRE_STATISTICS AS STATS_HRS,
|
||||||
ARCHIVE_ENABLED,
|
IS_ARCHIVE_ENABLED,
|
||||||
KEEP_IN_TRASH,
|
IS_KEEP_IN_TRASH,
|
||||||
CASE
|
CASE
|
||||||
WHEN ARCHIVAL_STRATEGY = 'MINIMUM_AGE_MONTHS'
|
WHEN ARCHIVAL_STRATEGY = 'MINIMUM_AGE_MONTHS'
|
||||||
AND MINIMUM_AGE_MONTHS = 0
|
AND MINIMUM_AGE_MONTHS = 0
|
||||||
AND FILES_COUNT_OVER_ARCHIVE_THRESHOLD = 10
|
AND ARCHIVE_THRESHOLD_FILES_COUNT = 10
|
||||||
AND ROWS_COUNT_OVER_ARCHIVE_THRESHOLD = 20000
|
AND ARCHIVE_THRESHOLD_ROWS_COUNT = 20000
|
||||||
AND BYTES_SUM_OVER_ARCHIVE_THRESHOLD = 268435456
|
AND ARCHIVE_THRESHOLD_BYTES_SUM = 268435456
|
||||||
AND HOURS_TO_EXPIRE_STATISTICS = 72
|
AND HOURS_TO_EXPIRE_STATISTICS = 72
|
||||||
AND ARCHIVE_ENABLED = 'Y'
|
AND IS_ARCHIVE_ENABLED = 'Y'
|
||||||
AND KEEP_IN_TRASH = 'N'
|
AND IS_KEEP_IN_TRASH = 'N'
|
||||||
THEN 'OK'
|
THEN 'OK'
|
||||||
ELSE 'ERROR'
|
ELSE 'ERROR'
|
||||||
END AS STATUS
|
END AS STATUS
|
||||||
@@ -267,12 +267,12 @@ SELECT
|
|||||||
COUNT(*) AS TOTAL_CONFIGURED,
|
COUNT(*) AS TOTAL_CONFIGURED,
|
||||||
SUM(CASE WHEN MINIMUM_AGE_MONTHS = 0 THEN 1 ELSE 0 END) AS CURRENT_MONTH_ONLY,
|
SUM(CASE WHEN MINIMUM_AGE_MONTHS = 0 THEN 1 ELSE 0 END) AS CURRENT_MONTH_ONLY,
|
||||||
SUM(CASE WHEN MINIMUM_AGE_MONTHS > 0 THEN 1 ELSE 0 END) AS MULTI_MONTH_RETENTION,
|
SUM(CASE WHEN MINIMUM_AGE_MONTHS > 0 THEN 1 ELSE 0 END) AS MULTI_MONTH_RETENTION,
|
||||||
SUM(CASE WHEN FILES_COUNT_OVER_ARCHIVE_THRESHOLD IS NOT NULL THEN 1 ELSE 0 END) AS WITH_FILE_THRESHOLD,
|
SUM(CASE WHEN ARCHIVE_THRESHOLD_FILES_COUNT IS NOT NULL THEN 1 ELSE 0 END) AS WITH_FILE_THRESHOLD,
|
||||||
SUM(CASE WHEN ROWS_COUNT_OVER_ARCHIVE_THRESHOLD IS NOT NULL THEN 1 ELSE 0 END) AS WITH_ROWS_THRESHOLD,
|
SUM(CASE WHEN ARCHIVE_THRESHOLD_ROWS_COUNT IS NOT NULL THEN 1 ELSE 0 END) AS WITH_ROWS_THRESHOLD,
|
||||||
SUM(CASE WHEN BYTES_SUM_OVER_ARCHIVE_THRESHOLD IS NOT NULL THEN 1 ELSE 0 END) AS WITH_BYTES_THRESHOLD,
|
SUM(CASE WHEN ARCHIVE_THRESHOLD_BYTES_SUM IS NOT NULL THEN 1 ELSE 0 END) AS WITH_BYTES_THRESHOLD,
|
||||||
SUM(CASE WHEN HOURS_TO_EXPIRE_STATISTICS IS NOT NULL THEN 1 ELSE 0 END) AS WITH_STATS_EXPIRY,
|
SUM(CASE WHEN HOURS_TO_EXPIRE_STATISTICS IS NOT NULL THEN 1 ELSE 0 END) AS WITH_STATS_EXPIRY,
|
||||||
SUM(CASE WHEN ARCHIVE_ENABLED = 'Y' THEN 1 ELSE 0 END) AS ARCHIVAL_ENABLED,
|
SUM(CASE WHEN IS_ARCHIVE_ENABLED = 'Y' THEN 1 ELSE 0 END) AS ARCHIVAL_ENABLED,
|
||||||
SUM(CASE WHEN KEEP_IN_TRASH = 'N' THEN 1 ELSE 0 END) AS IMMEDIATE_DELETE
|
SUM(CASE WHEN IS_KEEP_IN_TRASH = 'N' THEN 1 ELSE 0 END) AS IMMEDIATE_DELETE
|
||||||
FROM CT_MRDS.A_SOURCE_FILE_CONFIG
|
FROM CT_MRDS.A_SOURCE_FILE_CONFIG
|
||||||
WHERE SOURCE_FILE_TYPE = 'INPUT'
|
WHERE SOURCE_FILE_TYPE = 'INPUT'
|
||||||
AND ((A_SOURCE_KEY = 'LM' AND TABLE_ID LIKE 'LM_%')
|
AND ((A_SOURCE_KEY = 'LM' AND TABLE_ID LIKE 'LM_%')
|
||||||
@@ -306,9 +306,9 @@ SELECT
|
|||||||
COUNT(*) AS TABLE_COUNT,
|
COUNT(*) AS TABLE_COUNT,
|
||||||
MAX(ARCHIVAL_STRATEGY) AS STRATEGY,
|
MAX(ARCHIVAL_STRATEGY) AS STRATEGY,
|
||||||
MAX(MINIMUM_AGE_MONTHS) AS MIN_AGE,
|
MAX(MINIMUM_AGE_MONTHS) AS MIN_AGE,
|
||||||
MAX(FILES_COUNT_OVER_ARCHIVE_THRESHOLD) AS FILES_THRESHOLD,
|
MAX(ARCHIVE_THRESHOLD_FILES_COUNT) AS FILES_THRESHOLD,
|
||||||
MAX(ROWS_COUNT_OVER_ARCHIVE_THRESHOLD) AS ROWS_THRESHOLD,
|
MAX(ARCHIVE_THRESHOLD_ROWS_COUNT) AS ROWS_THRESHOLD,
|
||||||
ROUND(MAX(BYTES_SUM_OVER_ARCHIVE_THRESHOLD)/1048576, 0) || ' MB' AS BYTES_THRESHOLD,
|
ROUND(MAX(ARCHIVE_THRESHOLD_BYTES_SUM)/1048576, 0) || ' MB' AS BYTES_THRESHOLD,
|
||||||
MAX(HOURS_TO_EXPIRE_STATISTICS) AS STATS_HOURS
|
MAX(HOURS_TO_EXPIRE_STATISTICS) AS STATS_HOURS
|
||||||
FROM CT_MRDS.A_SOURCE_FILE_CONFIG
|
FROM CT_MRDS.A_SOURCE_FILE_CONFIG
|
||||||
WHERE SOURCE_FILE_TYPE = 'INPUT'
|
WHERE SOURCE_FILE_TYPE = 'INPUT'
|
||||||
|
|||||||
@@ -15,6 +15,7 @@ ALTER TABLE CT_MRDS.A_SOURCE_FILE_RECEIVED ADD CONSTRAINT A_SOURCE_FILE_RECEIVED
|
|||||||
CHECK (PROCESSING_STATUS IN (
|
CHECK (PROCESSING_STATUS IN (
|
||||||
'RECEIVED',
|
'RECEIVED',
|
||||||
'VALIDATED',
|
'VALIDATED',
|
||||||
|
'VALIDATION_FAILED',
|
||||||
'READY_FOR_INGESTION',
|
'READY_FOR_INGESTION',
|
||||||
'INGESTED',
|
'INGESTED',
|
||||||
'ARCHIVED', -- Legacy status (backward compatibility)
|
'ARCHIVED', -- Legacy status (backward compatibility)
|
||||||
|
|||||||
@@ -0,0 +1,23 @@
|
|||||||
|
-- MARS-828: Grant EXECUTE privilege on T_FILENAME to MRDS_LOADER
|
||||||
|
-- Author: Grzegorz Michalski
|
||||||
|
-- Date: 2026-02-17
|
||||||
|
-- Description: Grants EXECUTE privilege on CT_MRDS.T_FILENAME type to MRDS_LOADER user for file processing operations
|
||||||
|
|
||||||
|
PROMPT ========================================
|
||||||
|
PROMPT MARS-828: Granting EXECUTE on T_FILENAME
|
||||||
|
PROMPT ========================================
|
||||||
|
|
||||||
|
-- Grant EXECUTE privilege
|
||||||
|
GRANT EXECUTE ON CT_MRDS.T_FILENAME TO MRDS_LOADER;
|
||||||
|
|
||||||
|
PROMPT EXECUTE privilege on CT_MRDS.T_FILENAME granted to MRDS_LOADER
|
||||||
|
|
||||||
|
-- Verify grant
|
||||||
|
SELECT GRANTEE, PRIVILEGE, GRANTABLE
|
||||||
|
FROM USER_TAB_PRIVS
|
||||||
|
WHERE TABLE_NAME = 'T_FILENAME'
|
||||||
|
AND GRANTEE = 'MRDS_LOADER';
|
||||||
|
|
||||||
|
PROMPT ========================================
|
||||||
|
PROMPT T_FILENAME privilege grant completed
|
||||||
|
PROMPT ========================================
|
||||||
@@ -0,0 +1,29 @@
|
|||||||
|
--=============================================================================================================================
|
||||||
|
-- MARS-828: Install CT_MRDS.FILE_MANAGER Package Specification v3.3.2
|
||||||
|
--=============================================================================================================================
|
||||||
|
-- Purpose: Deploy FILE_MANAGER Package Specification with MARS-828 column compatibility
|
||||||
|
-- Author: Grzegorz Michalski
|
||||||
|
-- Date: 2026-02-20
|
||||||
|
-- Related: MARS-828 Threshold Column Rename Compatibility
|
||||||
|
--=============================================================================================================================
|
||||||
|
|
||||||
|
SET SERVEROUTPUT ON
|
||||||
|
|
||||||
|
PROMPT ========================================================================
|
||||||
|
PROMPT Installing CT_MRDS.FILE_MANAGER Package Specification v3.3.2
|
||||||
|
PROMPT ========================================================================
|
||||||
|
|
||||||
|
@@new_version/FILE_MANAGER.pkg
|
||||||
|
|
||||||
|
-- Verify package compilation (check specific schema when installing as ADMIN)
|
||||||
|
SELECT OBJECT_NAME, OBJECT_TYPE, STATUS
|
||||||
|
FROM ALL_OBJECTS
|
||||||
|
WHERE OWNER = 'CT_MRDS'
|
||||||
|
AND OBJECT_NAME = 'FILE_MANAGER'
|
||||||
|
AND OBJECT_TYPE = 'PACKAGE';
|
||||||
|
|
||||||
|
PROMPT SUCCESS: FILE_MANAGER Package Specification v3.3.2 installed
|
||||||
|
|
||||||
|
--=============================================================================================================================
|
||||||
|
-- End of Script
|
||||||
|
--=============================================================================================================================
|
||||||
@@ -0,0 +1,38 @@
|
|||||||
|
--=============================================================================================================================
|
||||||
|
-- MARS-828: Install CT_MRDS.FILE_MANAGER Package Body v3.3.2
|
||||||
|
--=============================================================================================================================
|
||||||
|
-- Purpose: Deploy FILE_MANAGER Package Body with MARS-828 threshold column compatibility
|
||||||
|
-- Author: Grzegorz Michalski
|
||||||
|
-- Date: 2026-02-20
|
||||||
|
-- Related: MARS-828 Threshold Column Rename Compatibility
|
||||||
|
--=============================================================================================================================
|
||||||
|
|
||||||
|
SET SERVEROUTPUT ON
|
||||||
|
|
||||||
|
PROMPT ========================================================================
|
||||||
|
PROMPT Installing CT_MRDS.FILE_MANAGER Package Body v3.3.2
|
||||||
|
PROMPT ========================================================================
|
||||||
|
|
||||||
|
@@new_version/FILE_MANAGER.pkb
|
||||||
|
|
||||||
|
-- Verify package compilation (check specific schema when installing as ADMIN)
|
||||||
|
SELECT OBJECT_NAME, OBJECT_TYPE, STATUS
|
||||||
|
FROM ALL_OBJECTS
|
||||||
|
WHERE OWNER = 'CT_MRDS'
|
||||||
|
AND OBJECT_NAME = 'FILE_MANAGER'
|
||||||
|
AND OBJECT_TYPE IN ('PACKAGE', 'PACKAGE BODY')
|
||||||
|
ORDER BY OBJECT_TYPE;
|
||||||
|
|
||||||
|
-- Check for any compilation errors
|
||||||
|
SELECT 'COMPILATION ERRORS FOUND' AS WARNING
|
||||||
|
FROM ALL_ERRORS
|
||||||
|
WHERE OWNER = 'CT_MRDS'
|
||||||
|
AND NAME = 'FILE_MANAGER'
|
||||||
|
AND TYPE = 'PACKAGE BODY'
|
||||||
|
AND ROWNUM = 1;
|
||||||
|
|
||||||
|
PROMPT SUCCESS: FILE_MANAGER Package Body v3.3.2 installed
|
||||||
|
|
||||||
|
--=============================================================================================================================
|
||||||
|
-- End of Script
|
||||||
|
--=============================================================================================================================
|
||||||
@@ -49,6 +49,7 @@ ALTER TABLE CT_MRDS.A_SOURCE_FILE_RECEIVED ADD CONSTRAINT A_SOURCE_FILE_RECEIVED
|
|||||||
CHECK (PROCESSING_STATUS IN (
|
CHECK (PROCESSING_STATUS IN (
|
||||||
'RECEIVED',
|
'RECEIVED',
|
||||||
'VALIDATED',
|
'VALIDATED',
|
||||||
|
'VALIDATION_FAILED',
|
||||||
'READY_FOR_INGESTION',
|
'READY_FOR_INGESTION',
|
||||||
'INGESTED',
|
'INGESTED',
|
||||||
'ARCHIVED'
|
'ARCHIVED'
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
-- MARS-828: Rollback archival strategy columns
|
-- MARS-828: Rollback archival strategy columns
|
||||||
-- Author: Grzegorz Michalski
|
-- Author: Grzegorz Michalski
|
||||||
-- Date: 2026-01-27
|
-- Date: 2026-01-27
|
||||||
-- Description: Remove ARCHIVAL_STRATEGY, MINIMUM_AGE_MONTHS, ARCHIVE_ENABLED, and KEEP_IN_TRASH columns
|
-- Description: Remove ARCHIVAL_STRATEGY, MINIMUM_AGE_MONTHS, IS_ARCHIVE_ENABLED, and IS_KEEP_IN_TRASH columns
|
||||||
|
|
||||||
PROMPT ========================================
|
PROMPT ========================================
|
||||||
PROMPT MARS-828: Removing archival strategy and config columns
|
PROMPT MARS-828: Removing archival strategy and config columns
|
||||||
@@ -12,17 +12,20 @@ ALTER TABLE CT_MRDS.A_SOURCE_FILE_CONFIG
|
|||||||
DROP CONSTRAINT CHK_ARCHIVAL_STRATEGY;
|
DROP CONSTRAINT CHK_ARCHIVAL_STRATEGY;
|
||||||
|
|
||||||
ALTER TABLE CT_MRDS.A_SOURCE_FILE_CONFIG
|
ALTER TABLE CT_MRDS.A_SOURCE_FILE_CONFIG
|
||||||
DROP CONSTRAINT CHK_ARCHIVE_ENABLED;
|
DROP CONSTRAINT CHK_IS_ARCHIVE_ENABLED;
|
||||||
|
|
||||||
ALTER TABLE CT_MRDS.A_SOURCE_FILE_CONFIG
|
ALTER TABLE CT_MRDS.A_SOURCE_FILE_CONFIG
|
||||||
DROP CONSTRAINT CHK_KEEP_IN_TRASH;
|
DROP CONSTRAINT CHK_IS_KEEP_IN_TRASH;
|
||||||
|
|
||||||
-- Drop columns
|
-- Drop columns
|
||||||
ALTER TABLE CT_MRDS.A_SOURCE_FILE_CONFIG DROP (
|
ALTER TABLE CT_MRDS.A_SOURCE_FILE_CONFIG DROP (
|
||||||
ARCHIVAL_STRATEGY,
|
ARCHIVAL_STRATEGY,
|
||||||
MINIMUM_AGE_MONTHS,
|
MINIMUM_AGE_MONTHS
|
||||||
ARCHIVE_ENABLED,
|
);
|
||||||
KEEP_IN_TRASH
|
|
||||||
|
ALTER TABLE CT_MRDS.A_SOURCE_FILE_CONFIG DROP (
|
||||||
|
IS_ARCHIVE_ENABLED,
|
||||||
|
IS_KEEP_IN_TRASH
|
||||||
);
|
);
|
||||||
|
|
||||||
-- Verify columns dropped
|
-- Verify columns dropped
|
||||||
@@ -31,7 +34,7 @@ SELECT
|
|||||||
FROM all_tab_columns
|
FROM all_tab_columns
|
||||||
WHERE owner = 'CT_MRDS'
|
WHERE owner = 'CT_MRDS'
|
||||||
AND table_name = 'A_SOURCE_FILE_CONFIG'
|
AND table_name = 'A_SOURCE_FILE_CONFIG'
|
||||||
AND column_name IN ('ARCHIVAL_STRATEGY', 'MINIMUM_AGE_MONTHS', 'ARCHIVE_ENABLED', 'KEEP_IN_TRASH');
|
AND column_name IN ('ARCHIVAL_STRATEGY', 'MINIMUM_AGE_MONTHS', 'IS_ARCHIVE_ENABLED', 'IS_KEEP_IN_TRASH');
|
||||||
|
|
||||||
PROMPT ========================================
|
PROMPT ========================================
|
||||||
PROMPT Archival strategy and config columns removed successfully
|
PROMPT Archival strategy and config columns removed successfully
|
||||||
|
|||||||
@@ -0,0 +1,47 @@
|
|||||||
|
-- MARS-828: Rollback threshold column renames
|
||||||
|
-- Author: Grzegorz Michalski
|
||||||
|
-- Date: 2026-01-28
|
||||||
|
-- Description: Reverts threshold columns back to original naming
|
||||||
|
|
||||||
|
PROMPT ========================================
|
||||||
|
PROMPT MARS-828: Rolling back threshold column renames
|
||||||
|
PROMPT ========================================
|
||||||
|
|
||||||
|
-- Revert threshold columns to original names
|
||||||
|
ALTER TABLE CT_MRDS.A_SOURCE_FILE_CONFIG
|
||||||
|
RENAME COLUMN ARCHIVE_THRESHOLD_DAYS TO DAYS_FOR_ARCHIVE_THRESHOLD;
|
||||||
|
|
||||||
|
ALTER TABLE CT_MRDS.A_SOURCE_FILE_CONFIG
|
||||||
|
RENAME COLUMN ARCHIVE_THRESHOLD_FILES_COUNT TO FILES_COUNT_OVER_ARCHIVE_THRESHOLD;
|
||||||
|
|
||||||
|
ALTER TABLE CT_MRDS.A_SOURCE_FILE_CONFIG
|
||||||
|
RENAME COLUMN ARCHIVE_THRESHOLD_BYTES_SUM TO BYTES_SUM_OVER_ARCHIVE_THRESHOLD;
|
||||||
|
|
||||||
|
ALTER TABLE CT_MRDS.A_SOURCE_FILE_CONFIG
|
||||||
|
RENAME COLUMN ARCHIVE_THRESHOLD_ROWS_COUNT TO ROWS_COUNT_OVER_ARCHIVE_THRESHOLD;
|
||||||
|
|
||||||
|
-- Verify rollback
|
||||||
|
PROMPT ========================================
|
||||||
|
PROMPT Verifying threshold column rollback...
|
||||||
|
PROMPT ========================================
|
||||||
|
|
||||||
|
SELECT
|
||||||
|
column_name,
|
||||||
|
data_type,
|
||||||
|
data_length
|
||||||
|
FROM all_tab_columns
|
||||||
|
WHERE owner = 'CT_MRDS'
|
||||||
|
AND table_name = 'A_SOURCE_FILE_CONFIG'
|
||||||
|
AND (column_name LIKE '%ARCHIVE_THRESHOLD%' OR column_name LIKE 'DAYS_FOR%')
|
||||||
|
ORDER BY column_id;
|
||||||
|
|
||||||
|
PROMPT ========================================
|
||||||
|
PROMPT Expected original columns:
|
||||||
|
PROMPT DAYS_FOR_ARCHIVE_THRESHOLD
|
||||||
|
PROMPT FILES_COUNT_OVER_ARCHIVE_THRESHOLD
|
||||||
|
PROMPT BYTES_SUM_OVER_ARCHIVE_THRESHOLD
|
||||||
|
PROMPT ROWS_COUNT_OVER_ARCHIVE_THRESHOLD
|
||||||
|
PROMPT ========================================
|
||||||
|
|
||||||
|
PROMPT Threshold column renames rolled back successfully
|
||||||
|
PROMPT ========================================
|
||||||
@@ -0,0 +1,84 @@
|
|||||||
|
-- =====================================================================
|
||||||
|
-- Script: 94b_MARS_828_rollback_column_comments.sql
|
||||||
|
-- MARS Issue: MARS-828
|
||||||
|
-- Author: Grzegorz Michalski
|
||||||
|
-- Date: 2026-02-20
|
||||||
|
-- Purpose: Remove column comments added by 01b_MARS_828_add_column_comments.sql
|
||||||
|
-- Description: Optional rollback - removes documentation but does not affect functionality
|
||||||
|
-- =====================================================================
|
||||||
|
|
||||||
|
PROMPT ========================================
|
||||||
|
PROMPT MARS-828: Removing column comments (optional)
|
||||||
|
PROMPT ========================================
|
||||||
|
|
||||||
|
-- =====================================================================
|
||||||
|
-- Remove A_SOURCE_FILE_CONFIG Column Comments
|
||||||
|
-- =====================================================================
|
||||||
|
|
||||||
|
PROMPT Removing column comments from A_SOURCE_FILE_CONFIG...
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.A_SOURCE_FILE_CONFIG_KEY IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.A_SOURCE_KEY IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.SOURCE_FILE_TYPE IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.SOURCE_FILE_ID IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.SOURCE_FILE_DESC IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.SOURCE_FILE_NAME_PATTERN IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.TABLE_ID IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.TEMPLATE_TABLE_NAME IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.CONTAINER_FILE_KEY IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ARCHIVE_THRESHOLD_DAYS IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ARCHIVE_THRESHOLD_FILES_COUNT IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ARCHIVE_THRESHOLD_BYTES_SUM IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ARCHIVE_THRESHOLD_ROWS_COUNT IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ODS_SCHEMA_NAME IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.HOURS_TO_EXPIRE_STATISTICS IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ARCHIVAL_STRATEGY IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.MINIMUM_AGE_MONTHS IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ENCODING IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.IS_ARCHIVE_ENABLED IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.IS_KEEP_IN_TRASH IS '';
|
||||||
|
|
||||||
|
-- =====================================================================
|
||||||
|
-- Remove A_SOURCE_FILE_RECEIVED Column Comments
|
||||||
|
-- =====================================================================
|
||||||
|
|
||||||
|
PROMPT Removing column comments from A_SOURCE_FILE_RECEIVED...
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_CONFIG_KEY IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.SOURCE_FILE_NAME IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.CHECKSUM IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.CREATED IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.BYTES IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.RECEPTION_DATE IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.PROCESSING_STATUS IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.EXTERNAL_TABLE_NAME IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.PARTITION_YEAR IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.PARTITION_MONTH IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.ARCH_PATH IS '';
|
||||||
|
|
||||||
|
-- =====================================================================
|
||||||
|
-- Verification
|
||||||
|
-- =====================================================================
|
||||||
|
|
||||||
|
PROMPT
|
||||||
|
PROMPT Verifying column comments removed...
|
||||||
|
PROMPT
|
||||||
|
|
||||||
|
SELECT
|
||||||
|
table_name,
|
||||||
|
COUNT(*) as total_columns,
|
||||||
|
COUNT(CASE WHEN comments IS NOT NULL AND LENGTH(comments) > 0 THEN 1 END) as documented_columns
|
||||||
|
FROM all_col_comments
|
||||||
|
WHERE owner = 'CT_MRDS'
|
||||||
|
AND table_name IN ('A_SOURCE_FILE_CONFIG', 'A_SOURCE_FILE_RECEIVED')
|
||||||
|
GROUP BY table_name
|
||||||
|
ORDER BY table_name;
|
||||||
|
|
||||||
|
PROMPT
|
||||||
|
PROMPT ========================================
|
||||||
|
PROMPT Column comments removed successfully
|
||||||
|
PROMPT ========================================
|
||||||
|
PROMPT NOTE: This is an optional rollback step
|
||||||
|
PROMPT Database functionality is not affected
|
||||||
|
PROMPT ========================================
|
||||||
@@ -0,0 +1,23 @@
|
|||||||
|
-- MARS-828 ROLLBACK: Revoke EXECUTE privilege on T_FILENAME from MRDS_LOADER
|
||||||
|
-- Author: Grzegorz Michalski
|
||||||
|
-- Date: 2026-02-17
|
||||||
|
-- Description: Revokes EXECUTE privilege on CT_MRDS.T_FILENAME type from MRDS_LOADER user
|
||||||
|
|
||||||
|
PROMPT ========================================
|
||||||
|
PROMPT MARS-828: Revoking EXECUTE on T_FILENAME
|
||||||
|
PROMPT ========================================
|
||||||
|
|
||||||
|
-- Revoke EXECUTE privilege
|
||||||
|
REVOKE EXECUTE ON CT_MRDS.T_FILENAME FROM MRDS_LOADER;
|
||||||
|
|
||||||
|
PROMPT EXECUTE privilege on CT_MRDS.T_FILENAME revoked from MRDS_LOADER
|
||||||
|
|
||||||
|
-- Verify revocation
|
||||||
|
SELECT GRANTEE, PRIVILEGE, GRANTABLE
|
||||||
|
FROM USER_TAB_PRIVS
|
||||||
|
WHERE TABLE_NAME = 'T_FILENAME'
|
||||||
|
AND GRANTEE = 'MRDS_LOADER';
|
||||||
|
|
||||||
|
PROMPT ========================================
|
||||||
|
PROMPT T_FILENAME privilege revocation completed
|
||||||
|
PROMPT ========================================
|
||||||
@@ -10,9 +10,9 @@
|
|||||||
-- archival parameters back to NULL (unconfigured state):
|
-- archival parameters back to NULL (unconfigured state):
|
||||||
-- - ARCHIVAL_STRATEGY
|
-- - ARCHIVAL_STRATEGY
|
||||||
-- - MINIMUM_AGE_MONTHS
|
-- - MINIMUM_AGE_MONTHS
|
||||||
-- - FILES_COUNT_OVER_ARCHIVE_THRESHOLD
|
-- - ARCHIVE_THRESHOLD_FILES_COUNT
|
||||||
-- - ROWS_COUNT_OVER_ARCHIVE_THRESHOLD
|
-- - ARCHIVE_THRESHOLD_ROWS_COUNT
|
||||||
-- - BYTES_SUM_OVER_ARCHIVE_THRESHOLD
|
-- - ARCHIVE_THRESHOLD_BYTES_SUM
|
||||||
-- - HOURS_TO_EXPIRE_STATISTICS
|
-- - HOURS_TO_EXPIRE_STATISTICS
|
||||||
--
|
--
|
||||||
-- This script reverts changes made by:
|
-- This script reverts changes made by:
|
||||||
@@ -47,9 +47,9 @@ UPDATE CT_MRDS.A_SOURCE_FILE_CONFIG
|
|||||||
SET ARCHIVAL_STRATEGY = NULL,
|
SET ARCHIVAL_STRATEGY = NULL,
|
||||||
MINIMUM_AGE_MONTHS = NULL,
|
MINIMUM_AGE_MONTHS = NULL,
|
||||||
ODS_SCHEMA_NAME = NULL,
|
ODS_SCHEMA_NAME = NULL,
|
||||||
FILES_COUNT_OVER_ARCHIVE_THRESHOLD = NULL,
|
ARCHIVE_THRESHOLD_FILES_COUNT = NULL,
|
||||||
ROWS_COUNT_OVER_ARCHIVE_THRESHOLD = NULL,
|
ARCHIVE_THRESHOLD_ROWS_COUNT = NULL,
|
||||||
BYTES_SUM_OVER_ARCHIVE_THRESHOLD = NULL,
|
ARCHIVE_THRESHOLD_BYTES_SUM = NULL,
|
||||||
HOURS_TO_EXPIRE_STATISTICS = NULL
|
HOURS_TO_EXPIRE_STATISTICS = NULL
|
||||||
WHERE SOURCE_FILE_TYPE = 'INPUT'
|
WHERE SOURCE_FILE_TYPE = 'INPUT'
|
||||||
AND A_SOURCE_KEY = 'LM'
|
AND A_SOURCE_KEY = 'LM'
|
||||||
@@ -88,9 +88,9 @@ UPDATE CT_MRDS.A_SOURCE_FILE_CONFIG
|
|||||||
SET ARCHIVAL_STRATEGY = NULL,
|
SET ARCHIVAL_STRATEGY = NULL,
|
||||||
MINIMUM_AGE_MONTHS = NULL,
|
MINIMUM_AGE_MONTHS = NULL,
|
||||||
ODS_SCHEMA_NAME = NULL,
|
ODS_SCHEMA_NAME = NULL,
|
||||||
FILES_COUNT_OVER_ARCHIVE_THRESHOLD = NULL,
|
ARCHIVE_THRESHOLD_FILES_COUNT = NULL,
|
||||||
ROWS_COUNT_OVER_ARCHIVE_THRESHOLD = NULL,
|
ARCHIVE_THRESHOLD_ROWS_COUNT = NULL,
|
||||||
BYTES_SUM_OVER_ARCHIVE_THRESHOLD = NULL,
|
ARCHIVE_THRESHOLD_BYTES_SUM = NULL,
|
||||||
HOURS_TO_EXPIRE_STATISTICS = NULL
|
HOURS_TO_EXPIRE_STATISTICS = NULL
|
||||||
WHERE SOURCE_FILE_TYPE = 'INPUT'
|
WHERE SOURCE_FILE_TYPE = 'INPUT'
|
||||||
AND A_SOURCE_KEY = 'CSDB'
|
AND A_SOURCE_KEY = 'CSDB'
|
||||||
@@ -109,9 +109,9 @@ UPDATE CT_MRDS.A_SOURCE_FILE_CONFIG
|
|||||||
SET ARCHIVAL_STRATEGY = NULL,
|
SET ARCHIVAL_STRATEGY = NULL,
|
||||||
MINIMUM_AGE_MONTHS = NULL,
|
MINIMUM_AGE_MONTHS = NULL,
|
||||||
ODS_SCHEMA_NAME = NULL,
|
ODS_SCHEMA_NAME = NULL,
|
||||||
FILES_COUNT_OVER_ARCHIVE_THRESHOLD = NULL,
|
ARCHIVE_THRESHOLD_FILES_COUNT = NULL,
|
||||||
ROWS_COUNT_OVER_ARCHIVE_THRESHOLD = NULL,
|
ARCHIVE_THRESHOLD_ROWS_COUNT = NULL,
|
||||||
BYTES_SUM_OVER_ARCHIVE_THRESHOLD = NULL,
|
ARCHIVE_THRESHOLD_BYTES_SUM = NULL,
|
||||||
HOURS_TO_EXPIRE_STATISTICS = NULL
|
HOURS_TO_EXPIRE_STATISTICS = NULL
|
||||||
WHERE SOURCE_FILE_TYPE = 'INPUT'
|
WHERE SOURCE_FILE_TYPE = 'INPUT'
|
||||||
AND A_SOURCE_KEY = 'CSDB'
|
AND A_SOURCE_KEY = 'CSDB'
|
||||||
@@ -148,16 +148,16 @@ SELECT
|
|||||||
TABLE_ID,
|
TABLE_ID,
|
||||||
ARCHIVAL_STRATEGY,
|
ARCHIVAL_STRATEGY,
|
||||||
MINIMUM_AGE_MONTHS,
|
MINIMUM_AGE_MONTHS,
|
||||||
FILES_COUNT_OVER_ARCHIVE_THRESHOLD AS FILE_THR,
|
ARCHIVE_THRESHOLD_FILES_COUNT AS FILE_THR,
|
||||||
ROWS_COUNT_OVER_ARCHIVE_THRESHOLD AS ROW_THR,
|
ARCHIVE_THRESHOLD_ROWS_COUNT AS ROW_THR,
|
||||||
BYTES_SUM_OVER_ARCHIVE_THRESHOLD AS BYTE_THR,
|
ARCHIVE_THRESHOLD_BYTES_SUM AS BYTE_THR,
|
||||||
HOURS_TO_EXPIRE_STATISTICS AS STATS_HRS,
|
HOURS_TO_EXPIRE_STATISTICS AS STATS_HRS,
|
||||||
CASE
|
CASE
|
||||||
WHEN ARCHIVAL_STRATEGY IS NULL
|
WHEN ARCHIVAL_STRATEGY IS NULL
|
||||||
AND MINIMUM_AGE_MONTHS IS NULL
|
AND MINIMUM_AGE_MONTHS IS NULL
|
||||||
AND FILES_COUNT_OVER_ARCHIVE_THRESHOLD IS NULL
|
AND ARCHIVE_THRESHOLD_FILES_COUNT IS NULL
|
||||||
AND ROWS_COUNT_OVER_ARCHIVE_THRESHOLD IS NULL
|
AND ARCHIVE_THRESHOLD_ROWS_COUNT IS NULL
|
||||||
AND BYTES_SUM_OVER_ARCHIVE_THRESHOLD IS NULL
|
AND ARCHIVE_THRESHOLD_BYTES_SUM IS NULL
|
||||||
AND HOURS_TO_EXPIRE_STATISTICS IS NULL
|
AND HOURS_TO_EXPIRE_STATISTICS IS NULL
|
||||||
THEN 'OK'
|
THEN 'OK'
|
||||||
ELSE 'ERROR - Still configured'
|
ELSE 'ERROR - Still configured'
|
||||||
@@ -176,16 +176,16 @@ SELECT
|
|||||||
TABLE_ID,
|
TABLE_ID,
|
||||||
ARCHIVAL_STRATEGY,
|
ARCHIVAL_STRATEGY,
|
||||||
MINIMUM_AGE_MONTHS,
|
MINIMUM_AGE_MONTHS,
|
||||||
FILES_COUNT_OVER_ARCHIVE_THRESHOLD AS FILE_THR,
|
ARCHIVE_THRESHOLD_FILES_COUNT AS FILE_THR,
|
||||||
ROWS_COUNT_OVER_ARCHIVE_THRESHOLD AS ROW_THR,
|
ARCHIVE_THRESHOLD_ROWS_COUNT AS ROW_THR,
|
||||||
BYTES_SUM_OVER_ARCHIVE_THRESHOLD AS BYTE_THR,
|
ARCHIVE_THRESHOLD_BYTES_SUM AS BYTE_THR,
|
||||||
HOURS_TO_EXPIRE_STATISTICS AS STATS_HRS,
|
HOURS_TO_EXPIRE_STATISTICS AS STATS_HRS,
|
||||||
CASE
|
CASE
|
||||||
WHEN ARCHIVAL_STRATEGY IS NULL
|
WHEN ARCHIVAL_STRATEGY IS NULL
|
||||||
AND MINIMUM_AGE_MONTHS IS NULL
|
AND MINIMUM_AGE_MONTHS IS NULL
|
||||||
AND FILES_COUNT_OVER_ARCHIVE_THRESHOLD IS NULL
|
AND ARCHIVE_THRESHOLD_FILES_COUNT IS NULL
|
||||||
AND ROWS_COUNT_OVER_ARCHIVE_THRESHOLD IS NULL
|
AND ARCHIVE_THRESHOLD_ROWS_COUNT IS NULL
|
||||||
AND BYTES_SUM_OVER_ARCHIVE_THRESHOLD IS NULL
|
AND ARCHIVE_THRESHOLD_BYTES_SUM IS NULL
|
||||||
AND HOURS_TO_EXPIRE_STATISTICS IS NULL
|
AND HOURS_TO_EXPIRE_STATISTICS IS NULL
|
||||||
THEN 'OK'
|
THEN 'OK'
|
||||||
ELSE 'ERROR - Still configured'
|
ELSE 'ERROR - Still configured'
|
||||||
@@ -204,16 +204,16 @@ SELECT
|
|||||||
TABLE_ID,
|
TABLE_ID,
|
||||||
ARCHIVAL_STRATEGY,
|
ARCHIVAL_STRATEGY,
|
||||||
MINIMUM_AGE_MONTHS,
|
MINIMUM_AGE_MONTHS,
|
||||||
FILES_COUNT_OVER_ARCHIVE_THRESHOLD AS FILE_THR,
|
ARCHIVE_THRESHOLD_FILES_COUNT AS FILE_THR,
|
||||||
ROWS_COUNT_OVER_ARCHIVE_THRESHOLD AS ROW_THR,
|
ARCHIVE_THRESHOLD_ROWS_COUNT AS ROW_THR,
|
||||||
BYTES_SUM_OVER_ARCHIVE_THRESHOLD AS BYTE_THR,
|
ARCHIVE_THRESHOLD_BYTES_SUM AS BYTE_THR,
|
||||||
HOURS_TO_EXPIRE_STATISTICS AS STATS_HRS,
|
HOURS_TO_EXPIRE_STATISTICS AS STATS_HRS,
|
||||||
CASE
|
CASE
|
||||||
WHEN ARCHIVAL_STRATEGY IS NULL
|
WHEN ARCHIVAL_STRATEGY IS NULL
|
||||||
AND MINIMUM_AGE_MONTHS IS NULL
|
AND MINIMUM_AGE_MONTHS IS NULL
|
||||||
AND FILES_COUNT_OVER_ARCHIVE_THRESHOLD IS NULL
|
AND ARCHIVE_THRESHOLD_FILES_COUNT IS NULL
|
||||||
AND ROWS_COUNT_OVER_ARCHIVE_THRESHOLD IS NULL
|
AND ARCHIVE_THRESHOLD_ROWS_COUNT IS NULL
|
||||||
AND BYTES_SUM_OVER_ARCHIVE_THRESHOLD IS NULL
|
AND ARCHIVE_THRESHOLD_BYTES_SUM IS NULL
|
||||||
AND HOURS_TO_EXPIRE_STATISTICS IS NULL
|
AND HOURS_TO_EXPIRE_STATISTICS IS NULL
|
||||||
THEN 'OK'
|
THEN 'OK'
|
||||||
ELSE 'ERROR - Still configured'
|
ELSE 'ERROR - Still configured'
|
||||||
|
|||||||
@@ -0,0 +1,10 @@
|
|||||||
|
-- ===================================================================
|
||||||
|
-- MARS-828: Rollback FILE_MANAGER Package Specification to v3.3.1
|
||||||
|
-- ===================================================================
|
||||||
|
-- Purpose: Restore previous package specification version (pre-threshold column rename compatibility)
|
||||||
|
-- Author: Grzegorz Michalski
|
||||||
|
-- Date: 2026-02-20
|
||||||
|
-- WARNING: This removes MARS-828 threshold column compatibility from FILE_MANAGER
|
||||||
|
-- ===================================================================
|
||||||
|
|
||||||
|
@@rollback_version/FILE_MANAGER.pkg
|
||||||
@@ -0,0 +1,10 @@
|
|||||||
|
-- ===================================================================
|
||||||
|
-- MARS-828: Rollback FILE_MANAGER Package Body to v3.3.1
|
||||||
|
-- ===================================================================
|
||||||
|
-- Purpose: Restore previous package body version (pre-threshold column rename compatibility)
|
||||||
|
-- Author: Grzegorz Michalski
|
||||||
|
-- Date: 2026-02-20
|
||||||
|
-- WARNING: This removes MARS-828 threshold column compatibility from FILE_MANAGER
|
||||||
|
-- ===================================================================
|
||||||
|
|
||||||
|
@@rollback_version/FILE_MANAGER.pkb
|
||||||
@@ -1,6 +1,7 @@
|
|||||||
-- ============================================================================
|
-- ============================================================================
|
||||||
-- MARS-828 Master Installation Script
|
-- MARS-828 Master Installation Script
|
||||||
-- ============================================================================
|
-- ============================================================================
|
||||||
|
-- Author: Grzegorz Michalski
|
||||||
-- Purpose: Deploy enhanced archival strategies for FILE_ARCHIVER package
|
-- Purpose: Deploy enhanced archival strategies for FILE_ARCHIVER package
|
||||||
-- Target Schema: CT_MRDS
|
-- Target Schema: CT_MRDS
|
||||||
-- Estimated Time: 2-3 minutes
|
-- Estimated Time: 2-3 minutes
|
||||||
@@ -34,10 +35,10 @@ PROMPT
|
|||||||
PROMPT ============================================================================
|
PROMPT ============================================================================
|
||||||
PROMPT MARS-828 Installation Starting
|
PROMPT MARS-828 Installation Starting
|
||||||
PROMPT ============================================================================
|
PROMPT ============================================================================
|
||||||
PROMPT Package: CT_MRDS.FILE_ARCHIVER
|
PROMPT Package: CT_MRDS.FILE_ARCHIVER v3.3.0 + CT_MRDS.FILE_MANAGER v3.3.2
|
||||||
PROMPT Change: Enhanced archival strategies (MINIMUM_AGE_MONTHS, HYBRID) + TRASH retention + Selective archiving
|
PROMPT Change: Enhanced archival strategies (MINIMUM_AGE_MONTHS, HYBRID) + TRASH retention + Selective archiving + FILE_MANAGER compatibility
|
||||||
PROMPT Purpose: Flexible archival policies per data source with file retention and config-based control
|
PROMPT Purpose: Flexible archival policies per data source with file retention and config-based control
|
||||||
PROMPT Steps: 9 (DDL, Trigger, Statuses, Package v3.3.0, Verify, Track, Configure)
|
PROMPT Steps: 14 (DDL, Rename, Comments, Trigger, Statuses, Grants, Packages, Verify, Track, Configure)
|
||||||
PROMPT Timestamp:
|
PROMPT Timestamp:
|
||||||
SELECT TO_CHAR(SYSDATE, 'YYYY-MM-DD HH24:MI:SS') AS install_start FROM DUAL;
|
SELECT TO_CHAR(SYSDATE, 'YYYY-MM-DD HH24:MI:SS') AS install_start FROM DUAL;
|
||||||
PROMPT ============================================================================
|
PROMPT ============================================================================
|
||||||
@@ -55,47 +56,72 @@ WHENEVER SQLERROR CONTINUE
|
|||||||
|
|
||||||
-- Installation steps
|
-- Installation steps
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT Step 1/9: Adding archival strategy and config columns to A_SOURCE_FILE_CONFIG
|
PROMPT Step 1/12: Adding archival strategy and config columns to A_SOURCE_FILE_CONFIG
|
||||||
PROMPT =============================================================================
|
PROMPT ==============================================================================
|
||||||
@@01_MARS_828_install_add_archival_strategy_columns.sql
|
@@01_MARS_828_install_add_archival_strategy_columns.sql
|
||||||
|
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT Step 2/9: Creating validation trigger
|
PROMPT Step 2/12: Renaming threshold columns for consistent naming
|
||||||
|
PROMPT ==========================================================
|
||||||
|
@@01a_MARS_828_rename_threshold_columns.sql
|
||||||
|
|
||||||
|
PROMPT
|
||||||
|
PROMPT Step 3/12: Adding comprehensive column comments
|
||||||
|
PROMPT ===============================================
|
||||||
|
@@01b_MARS_828_add_column_comments.sql
|
||||||
|
|
||||||
|
PROMPT
|
||||||
|
PROMPT Step 4/12: Creating validation trigger
|
||||||
PROMPT ======================================
|
PROMPT ======================================
|
||||||
@@02_MARS_828_install_archival_strategy_trigger.sql
|
@@02_MARS_828_install_archival_strategy_trigger.sql
|
||||||
|
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT Step 3/9: Adding TRASH retention statuses to A_SOURCE_FILE_RECEIVED
|
PROMPT Step 5/12: Adding TRASH retention statuses to A_SOURCE_FILE_RECEIVED
|
||||||
PROMPT =====================================================================
|
PROMPT ===================================================================
|
||||||
@@07_MARS_828_install_add_trash_retention_statuses.sql
|
@@07_MARS_828_install_add_trash_retention_statuses.sql
|
||||||
|
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT Step 4/9: Deploying FILE_ARCHIVER Package Specification v3.3.0
|
PROMPT Step 6/12: Granting privileges on T_FILENAME to MRDS_LOADER
|
||||||
PROMPT ================================================================
|
PROMPT ==========================================================
|
||||||
|
@@08_MARS_828_install_grant_t_filename.sql
|
||||||
|
|
||||||
|
PROMPT
|
||||||
|
PROMPT Step 7/12: Deploying FILE_ARCHIVER Package Specification v3.3.0
|
||||||
|
PROMPT ==============================================================
|
||||||
@@03_MARS_828_install_CT_MRDS_FILE_ARCHIVER_SPEC.sql
|
@@03_MARS_828_install_CT_MRDS_FILE_ARCHIVER_SPEC.sql
|
||||||
|
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT Step 5/9: Deploying FILE_ARCHIVER Package Body v3.3.0
|
PROMPT Step 8/14: Deploying FILE_ARCHIVER Package Body v3.3.0
|
||||||
PROMPT ======================================================
|
PROMPT ====================================================
|
||||||
@@04_MARS_828_install_CT_MRDS_FILE_ARCHIVER_BODY.sql
|
@@04_MARS_828_install_CT_MRDS_FILE_ARCHIVER_BODY.sql
|
||||||
|
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT Step 6/9: Verifying installation
|
PROMPT Step 9/14: Deploying FILE_MANAGER Package Specification v3.3.2
|
||||||
PROMPT =================================
|
PROMPT =============================================================
|
||||||
|
@@09_MARS_828_install_CT_MRDS_FILE_MANAGER_SPEC.sql
|
||||||
|
|
||||||
|
PROMPT
|
||||||
|
PROMPT Step 10/14: Deploying FILE_MANAGER Package Body v3.3.2
|
||||||
|
PROMPT ===================================================
|
||||||
|
@@10_MARS_828_install_CT_MRDS_FILE_MANAGER_BODY.sql
|
||||||
|
|
||||||
|
PROMPT
|
||||||
|
PROMPT Step 11/14: Verifying installation
|
||||||
|
PROMPT ==================================
|
||||||
@@05_MARS_828_verify_installation.sql
|
@@05_MARS_828_verify_installation.sql
|
||||||
|
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT Step 7/9: Tracking package versions
|
PROMPT Step 12/14: Tracking package versions
|
||||||
PROMPT ====================================
|
PROMPT =====================================
|
||||||
@@track_package_versions.sql
|
@@track_package_versions.sql
|
||||||
|
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT Step 8/9: Verifying tracked packages
|
PROMPT Step 13/14: Verifying tracked packages
|
||||||
PROMPT =====================================
|
PROMPT ======================================
|
||||||
@@verify_packages_version.sql
|
@@verify_packages_version.sql
|
||||||
|
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT Step 9/9: Configuring Release 01 tables archival strategies
|
PROMPT Step 14/14: Configuring Release 01 tables archival strategies
|
||||||
PROMPT ============================================================
|
PROMPT ============================================================
|
||||||
@@06_MARS_828_configure_release01_tables.sql
|
@@06_MARS_828_configure_release01_tables.sql
|
||||||
|
|
||||||
@@ -107,12 +133,13 @@ PROMPT Completion Time:
|
|||||||
SELECT TO_CHAR(SYSDATE, 'YYYY-MM-DD HH24:MI:SS') AS install_end FROM DUAL;
|
SELECT TO_CHAR(SYSDATE, 'YYYY-MM-DD HH24:MI:SS') AS install_end FROM DUAL;
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT Installation Summary:
|
PROMPT Installation Summary:
|
||||||
PROMPT - Package: CT_MRDS.FILE_ARCHIVER
|
PROMPT - Packages Installed:
|
||||||
PROMPT - Version: 3.3.0 (includes selective archiving and config-based TRASH policy)
|
PROMPT * CT_MRDS.FILE_ARCHIVER v3.3.0 (includes selective archiving and config-based TRASH policy)
|
||||||
|
PROMPT * CT_MRDS.FILE_MANAGER v3.3.2 (compatible with MARS-828 threshold column renames)
|
||||||
PROMPT - Strategies: THRESHOLD_BASED (default), MINIMUM_AGE_MONTHS (0=current month), HYBRID
|
PROMPT - Strategies: THRESHOLD_BASED (default), MINIMUM_AGE_MONTHS (0=current month), HYBRID
|
||||||
PROMPT - Selective Archiving: ARCHIVE_ENABLED column (Y=archive, N=skip)
|
PROMPT - Selective Archiving: IS_ARCHIVE_ENABLED column (Y=archive, N=skip)
|
||||||
PROMPT - TRASH Policy: KEEP_IN_TRASH column (Y=keep files, N=delete immediately)
|
PROMPT - TRASH Policy: IS_KEEP_IN_TRASH column (Y=keep files, N=delete immediately)
|
||||||
PROMPT * Default: ARCHIVE_ENABLED='Y', KEEP_IN_TRASH='N' (archiving enabled, immediate deletion)
|
PROMPT * Default: IS_ARCHIVE_ENABLED='Y', IS_KEEP_IN_TRASH='N' (archiving enabled, immediate deletion)
|
||||||
PROMPT * TRASH is a subfolder in DATA bucket (e.g., TRASH/LM/TABLE_NAME)
|
PROMPT * TRASH is a subfolder in DATA bucket (e.g., TRASH/LM/TABLE_NAME)
|
||||||
PROMPT * No more pKeepInTrash parameter - policy from config only
|
PROMPT * No more pKeepInTrash parameter - policy from config only
|
||||||
PROMPT - New Procedure: ARCHIVE_ALL_FOR_SOURCE(pSourceKey) for batch processing
|
PROMPT - New Procedure: ARCHIVE_ALL_FOR_SOURCE(pSourceKey) for batch processing
|
||||||
|
|||||||
@@ -16,20 +16,20 @@ CREATE TABLE CT_MRDS.A_SOURCE_FILE_CONFIG (
|
|||||||
TABLE_ID VARCHAR2(200),
|
TABLE_ID VARCHAR2(200),
|
||||||
TEMPLATE_TABLE_NAME VARCHAR2(200),
|
TEMPLATE_TABLE_NAME VARCHAR2(200),
|
||||||
CONTAINER_FILE_KEY NUMBER(38,0),
|
CONTAINER_FILE_KEY NUMBER(38,0),
|
||||||
DAYS_FOR_ARCHIVE_THRESHOLD NUMBER(4,0),
|
ARCHIVE_THRESHOLD_DAYS NUMBER(4,0),
|
||||||
FILES_COUNT_OVER_ARCHIVE_THRESHOLD NUMBER(38,0),
|
ARCHIVE_THRESHOLD_FILES_COUNT NUMBER(38,0),
|
||||||
BYTES_SUM_OVER_ARCHIVE_THRESHOLD NUMBER(38,0),
|
ARCHIVE_THRESHOLD_BYTES_SUM NUMBER(38,0),
|
||||||
ODS_SCHEMA_NAME VARCHAR2(100),
|
ODS_SCHEMA_NAME VARCHAR2(100),
|
||||||
ROWS_COUNT_OVER_ARCHIVE_THRESHOLD NUMBER(38,0),
|
ARCHIVE_THRESHOLD_ROWS_COUNT NUMBER(38,0),
|
||||||
HOURS_TO_EXPIRE_STATISTICS NUMBER(38,3),
|
HOURS_TO_EXPIRE_STATISTICS NUMBER(38,3),
|
||||||
ARCHIVAL_STRATEGY VARCHAR2(50),
|
ARCHIVAL_STRATEGY VARCHAR2(50),
|
||||||
MINIMUM_AGE_MONTHS NUMBER(3,0),
|
MINIMUM_AGE_MONTHS NUMBER(3,0),
|
||||||
ENCODING VARCHAR2(50) DEFAULT 'UTF8',
|
ENCODING VARCHAR2(50) DEFAULT 'UTF8',
|
||||||
ARCHIVE_ENABLED CHAR(1) DEFAULT 'N' NOT NULL,
|
IS_ARCHIVE_ENABLED CHAR(1) DEFAULT 'N' NOT NULL,
|
||||||
KEEP_IN_TRASH CHAR(1) DEFAULT 'N' NOT NULL,
|
IS_KEEP_IN_TRASH CHAR(1) DEFAULT 'N' NOT NULL,
|
||||||
CONSTRAINT A_SOURCE_FILE_CONFIG_PK PRIMARY KEY (A_SOURCE_FILE_CONFIG_KEY),
|
CONSTRAINT A_SOURCE_FILE_CONFIG_PK PRIMARY KEY (A_SOURCE_FILE_CONFIG_KEY),
|
||||||
CONSTRAINT CHK_ARCHIVE_ENABLED CHECK (ARCHIVE_ENABLED IN ('Y', 'N')),
|
CONSTRAINT CHK_IS_ARCHIVE_ENABLED CHECK (IS_ARCHIVE_ENABLED IN ('Y', 'N')),
|
||||||
CONSTRAINT CHK_KEEP_IN_TRASH CHECK (KEEP_IN_TRASH IN ('Y', 'N')),
|
CONSTRAINT CHK_IS_KEEP_IN_TRASH CHECK (IS_KEEP_IN_TRASH IN ('Y', 'N')),
|
||||||
CONSTRAINT SOURCE_FILE_TYPE_CHK CHECK (SOURCE_FILE_TYPE IN ('INPUT', 'CONTAINER', 'LOAD_CONFIG')),
|
CONSTRAINT SOURCE_FILE_TYPE_CHK CHECK (SOURCE_FILE_TYPE IN ('INPUT', 'CONTAINER', 'LOAD_CONFIG')),
|
||||||
CONSTRAINT ASFC_A_SOURCE_KEY_FK FOREIGN KEY(A_SOURCE_KEY) REFERENCES CT_MRDS.A_SOURCE(A_SOURCE_KEY),
|
CONSTRAINT ASFC_A_SOURCE_KEY_FK FOREIGN KEY(A_SOURCE_KEY) REFERENCES CT_MRDS.A_SOURCE(A_SOURCE_KEY),
|
||||||
CONSTRAINT ASFC_CONTAINER_FILE_KEY_FK FOREIGN KEY(CONTAINER_FILE_KEY) REFERENCES CT_MRDS.A_SOURCE_FILE_CONFIG(A_SOURCE_FILE_CONFIG_KEY),
|
CONSTRAINT ASFC_CONTAINER_FILE_KEY_FK FOREIGN KEY(CONTAINER_FILE_KEY) REFERENCES CT_MRDS.A_SOURCE_FILE_CONFIG(A_SOURCE_FILE_CONFIG_KEY),
|
||||||
@@ -47,10 +47,64 @@ ON "CT_MRDS"."A_SOURCE_FILE_CONFIG" ("SOURCE_FILE_TYPE", "SOURCE_FILE_ID", "TABL
|
|||||||
TABLESPACE "DATA";
|
TABLESPACE "DATA";
|
||||||
|
|
||||||
-- Column comments
|
-- Column comments
|
||||||
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ARCHIVAL_STRATEGY IS 'Archival strategy: THRESHOLD_BASED, CURRENT_MONTH_ONLY, MINIMUM_AGE_MONTHS, HYBRID. Added in MARS-828';
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.A_SOURCE_FILE_CONFIG_KEY IS
|
||||||
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.MINIMUM_AGE_MONTHS IS 'Minimum age in months before archival (required for MINIMUM_AGE_MONTHS strategy). Added in MARS-828';
|
'Primary key - unique identifier for source file configuration record';
|
||||||
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ENCODING IS 'Oracle character set name for CSV files (e.g., UTF8, WE8MSWIN1252, EE8ISO8859P2). Added in MARS-1049';
|
|
||||||
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ARCHIVE_ENABLED IS 'Y=Enable archiving, N=Skip archiving. Controls if table participates in archival process. Added in MARS-828 v3.3.0';
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.A_SOURCE_KEY IS
|
||||||
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.KEEP_IN_TRASH IS 'Y=Keep files in TRASH after archiving, N=Delete immediately. Controls TRASH retention policy. Added in MARS-828 v3.3.0';
|
'Foreign key to A_SOURCE table - identifies the source system (e.g., LM, C2D, CSDB)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.SOURCE_FILE_TYPE IS
|
||||||
|
'Type of file configuration: INPUT (data files), CONTAINER (xml files), or LOAD_CONFIG (configuration files)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.SOURCE_FILE_ID IS
|
||||||
|
'Unique identifier for the source file within the source system (e.g., UC_DISSEM, STANDING_FACILITIES)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.SOURCE_FILE_DESC IS
|
||||||
|
'Human-readable description of the source file and its purpose';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.SOURCE_FILE_NAME_PATTERN IS
|
||||||
|
'Filename pattern for matching incoming files (supports wildcards, e.g., UC_NMA_DISSEM-*.csv)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.TABLE_ID IS
|
||||||
|
'Identifier for the target table where data will be loaded (without schema prefix)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.TEMPLATE_TABLE_NAME IS
|
||||||
|
'Fully qualified name of template table in CT_ET_TEMPLATES schema used for external table creation';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.CONTAINER_FILE_KEY IS
|
||||||
|
'Foreign key to parent container configuration when this file is part of an xml (NULL for standalone files)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ARCHIVE_THRESHOLD_DAYS IS
|
||||||
|
'Threshold for THRESHOLD_BASED strategy: archive data older than N days';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ARCHIVE_THRESHOLD_FILES_COUNT IS
|
||||||
|
'Trigger archival when file count exceeds this threshold (used in THRESHOLD_BASED and HYBRID strategies)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ARCHIVE_THRESHOLD_BYTES_SUM IS
|
||||||
|
'Trigger archival when total size in bytes exceeds this threshold (used in THRESHOLD_BASED and HYBRID strategies)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ARCHIVE_THRESHOLD_ROWS_COUNT IS
|
||||||
|
'Trigger archival when total row count exceeds this threshold (used in THRESHOLD_BASED and HYBRID strategies)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ODS_SCHEMA_NAME IS
|
||||||
|
'Schema name where ODS external tables are created (typically ODS)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.HOURS_TO_EXPIRE_STATISTICS IS
|
||||||
|
'Number of hours before table statistics expire and need to be recalculated';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ARCHIVAL_STRATEGY IS
|
||||||
|
'Archival strategy: THRESHOLD_BASED (days-based), MINIMUM_AGE_MONTHS (0=current month, N=retain N months), HYBRID (combination)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.MINIMUM_AGE_MONTHS IS
|
||||||
|
'Minimum age in months before archival (required for MINIMUM_AGE_MONTHS and HYBRID strategies, 0=current month only)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ENCODING IS
|
||||||
|
'Oracle character set name for CSV files (e.g., UTF8, WE8MSWIN1252, EE8ISO8859P2)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.IS_ARCHIVE_ENABLED IS
|
||||||
|
'Y=Enable archiving, N=Skip archiving. Controls if table participates in archival process';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.IS_KEEP_IN_TRASH IS
|
||||||
|
'Y=Keep files in TRASH after archiving, N=Delete immediately. Controls TRASH retention policy';
|
||||||
|
|
||||||
GRANT SELECT, INSERT, UPDATE, DELETE ON CT_MRDS.A_SOURCE_FILE_CONFIG TO MRDS_LOADER_ROLE;
|
GRANT SELECT, INSERT, UPDATE, DELETE ON CT_MRDS.A_SOURCE_FILE_CONFIG TO MRDS_LOADER_ROLE;
|
||||||
@@ -26,4 +26,41 @@ CREATE TABLE CT_MRDS.A_SOURCE_FILE_RECEIVED (
|
|||||||
CREATE UNIQUE INDEX CT_MRDS.A_SOURCE_FILE_RECEIVED_UK1
|
CREATE UNIQUE INDEX CT_MRDS.A_SOURCE_FILE_RECEIVED_UK1
|
||||||
ON CT_MRDS.A_SOURCE_FILE_RECEIVED(CHECKSUM, CREATED, BYTES);
|
ON CT_MRDS.A_SOURCE_FILE_RECEIVED(CHECKSUM, CREATED, BYTES);
|
||||||
|
|
||||||
|
-- Column comments
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY IS
|
||||||
|
'Primary key - unique identifier for received file record';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_CONFIG_KEY IS
|
||||||
|
'Foreign key to A_SOURCE_FILE_CONFIG - links file to its configuration and processing rules';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.SOURCE_FILE_NAME IS
|
||||||
|
'Full object name/path of the received file in OCI Object Storage (includes INBOX prefix)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.CHECKSUM IS
|
||||||
|
'MD5 checksum of file content for integrity verification and duplicate detection';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.CREATED IS
|
||||||
|
'Timestamp with timezone when file was created/uploaded to Object Storage (from DBMS_CLOUD.LIST_OBJECTS)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.BYTES IS
|
||||||
|
'File size in bytes';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.RECEPTION_DATE IS
|
||||||
|
'Date when file was registered in the system (extracted from CREATED timestamp)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.PROCESSING_STATUS IS
|
||||||
|
'Current processing status: RECEIVED → VALIDATED → READY_FOR_INGESTION → INGESTED → ARCHIVED_AND_TRASHED → ARCHIVED_AND_PURGED';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.EXTERNAL_TABLE_NAME IS
|
||||||
|
'Name of temporary external table created for file validation (dropped after validation)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.PARTITION_YEAR IS
|
||||||
|
'Year partition value (YYYY format) when file was archived to ARCHIVE bucket with Hive-style partitioning';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.PARTITION_MONTH IS
|
||||||
|
'Month partition value (MM format) when file was archived to ARCHIVE bucket with Hive-style partitioning';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.ARCH_FILE_NAME IS
|
||||||
|
'Archive directory prefix in ARCHIVE bucket containing archived Parquet files (supports multiple files from parallel DBMS_CLOUD.EXPORT_DATA)';
|
||||||
|
|
||||||
GRANT SELECT, INSERT, UPDATE, DELETE ON CT_MRDS.A_SOURCE_FILE_RECEIVED TO MRDS_LOADER_ROLE;
|
GRANT SELECT, INSERT, UPDATE, DELETE ON CT_MRDS.A_SOURCE_FILE_RECEIVED TO MRDS_LOADER_ROLE;
|
||||||
@@ -21,7 +21,7 @@ AS
|
|||||||
CASE pSourceFileConfig.ARCHIVAL_STRATEGY
|
CASE pSourceFileConfig.ARCHIVAL_STRATEGY
|
||||||
-- Legacy threshold-based strategy (backward compatible)
|
-- Legacy threshold-based strategy (backward compatible)
|
||||||
WHEN 'THRESHOLD_BASED' THEN
|
WHEN 'THRESHOLD_BASED' THEN
|
||||||
vWhereClause := 'extract(day from (systimestamp - workflow_start)) > ' || pSourceFileConfig.DAYS_FOR_ARCHIVE_THRESHOLD;
|
vWhereClause := 'extract(day from (systimestamp - workflow_start)) > ' || pSourceFileConfig.ARCHIVE_THRESHOLD_DAYS;
|
||||||
|
|
||||||
-- Archive data older than X months (0 = current month only)
|
-- Archive data older than X months (0 = current month only)
|
||||||
WHEN 'MINIMUM_AGE_MONTHS' THEN
|
WHEN 'MINIMUM_AGE_MONTHS' THEN
|
||||||
@@ -113,15 +113,15 @@ AS
|
|||||||
vSourceFileConfig := CT_MRDS.FILE_MANAGER.GET_SOURCE_FILE_CONFIG(pSourceFileConfigKey => pSourceFileConfigKey);
|
vSourceFileConfig := CT_MRDS.FILE_MANAGER.GET_SOURCE_FILE_CONFIG(pSourceFileConfigKey => pSourceFileConfigKey);
|
||||||
|
|
||||||
-- Check if archiving is enabled for this configuration
|
-- Check if archiving is enabled for this configuration
|
||||||
IF vSourceFileConfig.ARCHIVE_ENABLED = 'N' THEN
|
IF vSourceFileConfig.IS_ARCHIVE_ENABLED = 'N' THEN
|
||||||
CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT('Archiving disabled for this configuration (ARCHIVE_ENABLED=N). Skipping.', 'WARNING', vParameters);
|
CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT('Archiving disabled for this configuration (IS_ARCHIVE_ENABLED=N). Skipping.', 'WARNING', vParameters);
|
||||||
CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT('End','INFO',vParameters);
|
CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT('End','INFO',vParameters);
|
||||||
RETURN;
|
RETURN;
|
||||||
END IF;
|
END IF;
|
||||||
|
|
||||||
-- Get TRASH policy from configuration
|
-- Get TRASH policy from configuration
|
||||||
vKeepInTrash := (vSourceFileConfig.KEEP_IN_TRASH = 'Y');
|
vKeepInTrash := (vSourceFileConfig.IS_KEEP_IN_TRASH = 'Y');
|
||||||
CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT('TRASH policy from config: KEEP_IN_TRASH=' || vSourceFileConfig.KEEP_IN_TRASH, 'INFO', vParameters);
|
CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT('TRASH policy from config: IS_KEEP_IN_TRASH=' || vSourceFileConfig.IS_KEEP_IN_TRASH, 'INFO', vParameters);
|
||||||
|
|
||||||
vTableStat := GET_TABLE_STAT(pSourceFileConfigKey => pSourceFileConfigKey);
|
vTableStat := GET_TABLE_STAT(pSourceFileConfigKey => pSourceFileConfigKey);
|
||||||
|
|
||||||
@@ -142,9 +142,9 @@ AS
|
|||||||
CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT('Archival strategy: MINIMUM_AGE_MONTHS (threshold-independent)','INFO');
|
CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT('Archival strategy: MINIMUM_AGE_MONTHS (threshold-independent)','INFO');
|
||||||
ELSE
|
ELSE
|
||||||
-- THRESHOLD_BASED and HYBRID: Check thresholds
|
-- THRESHOLD_BASED and HYBRID: Check thresholds
|
||||||
if vTableStat.OVER_ARCH_THRESOLD_FILE_COUNT >= vSourceFileConfig.FILES_COUNT_OVER_ARCHIVE_THRESHOLD then vArchivalTriggeredBy := 'FILES_COUNT';
|
if vTableStat.OVER_ARCH_THRESOLD_FILE_COUNT >= vSourceFileConfig.ARCHIVE_THRESHOLD_FILES_COUNT then vArchivalTriggeredBy := 'FILES_COUNT';
|
||||||
elsif vTableStat.OVER_ARCH_THRESOLD_ROW_COUNT >= vSourceFileConfig.ROWS_COUNT_OVER_ARCHIVE_THRESHOLD then vArchivalTriggeredBy := vArchivalTriggeredBy||', ROWS_COUNT';
|
elsif vTableStat.OVER_ARCH_THRESOLD_ROW_COUNT >= vSourceFileConfig.ARCHIVE_THRESHOLD_ROWS_COUNT then vArchivalTriggeredBy := vArchivalTriggeredBy||', ROWS_COUNT';
|
||||||
elsif vTableStat.OVER_ARCH_THRESOLD_SIZE >= vSourceFileConfig.BYTES_SUM_OVER_ARCHIVE_THRESHOLD then vArchivalTriggeredBy := vArchivalTriggeredBy||', BYTES_SUM';
|
elsif vTableStat.OVER_ARCH_THRESOLD_SIZE >= vSourceFileConfig.ARCHIVE_THRESHOLD_BYTES_SUM then vArchivalTriggeredBy := vArchivalTriggeredBy||', BYTES_SUM';
|
||||||
else CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT('Non of archival triggers reached','INFO');
|
else CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT('Non of archival triggers reached','INFO');
|
||||||
end if;
|
end if;
|
||||||
END IF;
|
END IF;
|
||||||
@@ -166,6 +166,7 @@ AS
|
|||||||
join CT_MRDS.a_workflow_history h
|
join CT_MRDS.a_workflow_history h
|
||||||
on s.a_workflow_history_key = h.a_workflow_history_key
|
on s.a_workflow_history_key = h.a_workflow_history_key
|
||||||
where ' || GET_ARCHIVAL_WHERE_CLAUSE(vSourceFileConfig) || '
|
where ' || GET_ARCHIVAL_WHERE_CLAUSE(vSourceFileConfig) || '
|
||||||
|
and h.WORKFLOW_SUCCESSFUL = ''Y''
|
||||||
group by file$name, file$path, to_char(h.workflow_start,''yyyy''), to_char(h.workflow_start,''mm'')'
|
group by file$name, file$path, to_char(h.workflow_start,''yyyy''), to_char(h.workflow_start,''mm'')'
|
||||||
;
|
;
|
||||||
|
|
||||||
@@ -182,14 +183,14 @@ AS
|
|||||||
join CT_MRDS.A_SOURCE_FILE_RECEIVED r
|
join CT_MRDS.A_SOURCE_FILE_RECEIVED r
|
||||||
on s.file$name = r.source_file_name
|
on s.file$name = r.source_file_name
|
||||||
and r.a_source_file_config_key = '||pSourceFileConfigKey||'
|
and r.a_source_file_config_key = '||pSourceFileConfigKey||'
|
||||||
and r.PROCESSING_STATUS = ''INGESTED''
|
|
||||||
join CT_MRDS.a_workflow_history h
|
join CT_MRDS.a_workflow_history h
|
||||||
on s.a_workflow_history_key = h.a_workflow_history_key
|
on s.a_workflow_history_key = h.a_workflow_history_key
|
||||||
and to_char(h.workflow_start,''yyyy'') = '''||ym_loop.year||'''
|
and to_char(h.workflow_start,''yyyy'') = '''||ym_loop.year||'''
|
||||||
and to_char(h.workflow_start,''mm'') = '''||ym_loop.month||'''
|
and to_char(h.workflow_start,''mm'') = '''||ym_loop.month||'''
|
||||||
|
and h.WORKFLOW_SUCCESSFUL = ''Y''
|
||||||
'
|
'
|
||||||
;
|
;
|
||||||
vUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE')||vSourceFileConfig.A_SOURCE_KEY||'/'||vSourceFileConfig.TABLE_ID||'/PARTITION_YEAR='||ym_loop.year||'/PARTITION_MONTH='||ym_loop.month||'/';
|
vUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE')||'ARCHIVE/'||vSourceFileConfig.A_SOURCE_KEY||'/'||vSourceFileConfig.TABLE_ID||'/PARTITION_YEAR='||ym_loop.year||'/PARTITION_MONTH='||ym_loop.month||'/';
|
||||||
|
|
||||||
CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT('Start Archiving for YEAR_MONTH: '||ym_loop.year||'_'||ym_loop.month ,'INFO');
|
CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT('Start Archiving for YEAR_MONTH: '||ym_loop.year||'_'||ym_loop.month ,'INFO');
|
||||||
CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT('Parameter for DBMS_CLOUD.EXPORT_DATA => file_uri_list' ,'DEBUG',vUri);
|
CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT('Parameter for DBMS_CLOUD.EXPORT_DATA => file_uri_list' ,'DEBUG',vUri);
|
||||||
@@ -234,7 +235,7 @@ AS
|
|||||||
|
|
||||||
-- Note: DBMS_CLOUD.EXPORT_DATA may create multiple parquet files (parallel execution)
|
-- Note: DBMS_CLOUD.EXPORT_DATA may create multiple parquet files (parallel execution)
|
||||||
-- Instead of tracking individual files, we store the archive directory prefix
|
-- Instead of tracking individual files, we store the archive directory prefix
|
||||||
-- ARCH_FILE_NAME will contain the directory URI where all parquet files are located
|
-- ARCH_PATH contain the directory URI where all parquet files are located
|
||||||
vFilename := vUri; -- Store directory prefix instead of individual filename
|
vFilename := vUri; -- Store directory prefix instead of individual filename
|
||||||
|
|
||||||
-- Try to drop EXPORTED FILES ("regular data files")
|
-- Try to drop EXPORTED FILES ("regular data files")
|
||||||
@@ -245,7 +246,7 @@ AS
|
|||||||
BEGIN
|
BEGIN
|
||||||
UPDATE CT_MRDS.A_SOURCE_FILE_RECEIVED r
|
UPDATE CT_MRDS.A_SOURCE_FILE_RECEIVED r
|
||||||
SET PROCESSING_STATUS = 'ARCHIVED_AND_TRASHED' -- Status reflects file is archived and kept in TRASH
|
SET PROCESSING_STATUS = 'ARCHIVED_AND_TRASHED' -- Status reflects file is archived and kept in TRASH
|
||||||
,ARCH_FILE_NAME = vFilename -- Now contains directory prefix, not individual file
|
,ARCH_PATH = vFilename -- Now contains directory prefix, not individual file
|
||||||
,PARTITION_YEAR = ym_loop.year -- Record which partition year the data was archived to
|
,PARTITION_YEAR = ym_loop.year -- Record which partition year the data was archived to
|
||||||
,PARTITION_MONTH = ym_loop.month -- Record which partition month the data was archived to
|
,PARTITION_MONTH = ym_loop.month -- Record which partition month the data was archived to
|
||||||
WHERE r.a_source_file_config_key= pSourceFileConfigKey
|
WHERE r.a_source_file_config_key= pSourceFileConfigKey
|
||||||
@@ -296,10 +297,10 @@ AS
|
|||||||
AND r.source_file_name = f.filename
|
AND r.source_file_name = f.filename
|
||||||
AND r.PROCESSING_STATUS = 'ARCHIVED_AND_TRASHED';
|
AND r.PROCESSING_STATUS = 'ARCHIVED_AND_TRASHED';
|
||||||
END LOOP;
|
END LOOP;
|
||||||
CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT('All archived files removed from TRASH folder and marked as ARCHIVED_AND_PURGED (config: KEEP_IN_TRASH=N).','INFO');
|
CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT('All archived files removed from TRASH folder and marked as ARCHIVED_AND_PURGED (config: IS_KEEP_IN_TRASH=N).','INFO');
|
||||||
ELSE
|
ELSE
|
||||||
-- Keep files in TRASH folder (status remains ARCHIVED_AND_TRASHED)
|
-- Keep files in TRASH folder (status remains ARCHIVED_AND_TRASHED)
|
||||||
CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT('Archived files kept in TRASH folder for retention (config: KEEP_IN_TRASH=Y, status: ARCHIVED_AND_TRASHED).','INFO');
|
CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT('Archived files kept in TRASH folder for retention (config: IS_KEEP_IN_TRASH=Y, status: ARCHIVED_AND_TRASHED).','INFO');
|
||||||
END IF;
|
END IF;
|
||||||
|
|
||||||
--ROLLBACK PART
|
--ROLLBACK PART
|
||||||
@@ -324,7 +325,7 @@ AS
|
|||||||
|
|
||||||
UPDATE CT_MRDS.A_SOURCE_FILE_RECEIVED r
|
UPDATE CT_MRDS.A_SOURCE_FILE_RECEIVED r
|
||||||
SET PROCESSING_STATUS = 'INGESTED'
|
SET PROCESSING_STATUS = 'INGESTED'
|
||||||
,ARCH_FILE_NAME = NULL
|
,ARCH_PATH = NULL
|
||||||
WHERE r.a_source_file_config_key = pSourceFileConfigKey
|
WHERE r.a_source_file_config_key = pSourceFileConfigKey
|
||||||
AND r.source_file_name = f.filename
|
AND r.source_file_name = f.filename
|
||||||
;
|
;
|
||||||
@@ -483,7 +484,7 @@ AS
|
|||||||
,sum(case when ' || vWhereClause || ' then row_count_per_file else 0 end) as OLD_ROW_COUNT
|
,sum(case when ' || vWhereClause || ' then row_count_per_file else 0 end) as OLD_ROW_COUNT
|
||||||
,sum(r.bytes) as BYTES
|
,sum(r.bytes) as BYTES
|
||||||
,sum(case when ' || vWhereClause || ' then r.bytes else 0 end) as OLD_BYTES
|
,sum(case when ' || vWhereClause || ' then r.bytes else 0 end) as OLD_BYTES
|
||||||
,'||COALESCE(TO_CHAR(vSourceFileConfig.DAYS_FOR_ARCHIVE_THRESHOLD), 'NULL')||' as DAYS_FOR_ARCHIVE_THRESHOLD
|
,'||COALESCE(TO_CHAR(vSourceFileConfig.ARCHIVE_THRESHOLD_DAYS), 'NULL')||' as ARCHIVE_THRESHOLD_DAYS
|
||||||
,systimestamp as CREATED
|
,systimestamp as CREATED
|
||||||
from tmp_gr t
|
from tmp_gr t
|
||||||
join (SELECT * from DBMS_CLOUD.LIST_OBJECTS(
|
join (SELECT * from DBMS_CLOUD.LIST_OBJECTS(
|
||||||
@@ -579,7 +580,7 @@ AS
|
|||||||
|
|
||||||
UPDATE CT_MRDS.A_SOURCE_FILE_RECEIVED
|
UPDATE CT_MRDS.A_SOURCE_FILE_RECEIVED
|
||||||
SET PROCESSING_STATUS = 'INGESTED',
|
SET PROCESSING_STATUS = 'INGESTED',
|
||||||
ARCH_FILE_NAME = NULL,
|
ARCH_PATH = NULL,
|
||||||
PARTITION_YEAR = NULL,
|
PARTITION_YEAR = NULL,
|
||||||
PARTITION_MONTH = NULL
|
PARTITION_MONTH = NULL
|
||||||
WHERE A_SOURCE_FILE_RECEIVED_KEY = pSourceFileReceivedKey
|
WHERE A_SOURCE_FILE_RECEIVED_KEY = pSourceFileReceivedKey
|
||||||
@@ -620,7 +621,7 @@ AS
|
|||||||
|
|
||||||
UPDATE CT_MRDS.A_SOURCE_FILE_RECEIVED
|
UPDATE CT_MRDS.A_SOURCE_FILE_RECEIVED
|
||||||
SET PROCESSING_STATUS = 'INGESTED',
|
SET PROCESSING_STATUS = 'INGESTED',
|
||||||
ARCH_FILE_NAME = NULL,
|
ARCH_PATH = NULL,
|
||||||
PARTITION_YEAR = NULL,
|
PARTITION_YEAR = NULL,
|
||||||
PARTITION_MONTH = NULL
|
PARTITION_MONTH = NULL
|
||||||
WHERE A_SOURCE_FILE_CONFIG_KEY = pSourceFileConfigKey
|
WHERE A_SOURCE_FILE_CONFIG_KEY = pSourceFileConfigKey
|
||||||
@@ -660,7 +661,7 @@ AS
|
|||||||
|
|
||||||
UPDATE CT_MRDS.A_SOURCE_FILE_RECEIVED
|
UPDATE CT_MRDS.A_SOURCE_FILE_RECEIVED
|
||||||
SET PROCESSING_STATUS = 'INGESTED',
|
SET PROCESSING_STATUS = 'INGESTED',
|
||||||
ARCH_FILE_NAME = NULL,
|
ARCH_PATH = NULL,
|
||||||
PARTITION_YEAR = NULL,
|
PARTITION_YEAR = NULL,
|
||||||
PARTITION_MONTH = NULL
|
PARTITION_MONTH = NULL
|
||||||
WHERE PROCESSING_STATUS = 'ARCHIVED_AND_TRASHED';
|
WHERE PROCESSING_STATUS = 'ARCHIVED_AND_TRASHED';
|
||||||
@@ -1041,8 +1042,8 @@ AS
|
|||||||
SELECT
|
SELECT
|
||||||
A_SOURCE_FILE_CONFIG_KEY,
|
A_SOURCE_FILE_CONFIG_KEY,
|
||||||
TABLE_ID,
|
TABLE_ID,
|
||||||
ARCHIVE_ENABLED,
|
IS_ARCHIVE_ENABLED,
|
||||||
KEEP_IN_TRASH,
|
IS_KEEP_IN_TRASH,
|
||||||
A_SOURCE_KEY
|
A_SOURCE_KEY
|
||||||
FROM CT_MRDS.A_SOURCE_FILE_CONFIG
|
FROM CT_MRDS.A_SOURCE_FILE_CONFIG
|
||||||
WHERE SOURCE_FILE_TYPE = 'INPUT'
|
WHERE SOURCE_FILE_TYPE = 'INPUT'
|
||||||
@@ -1058,16 +1059,16 @@ AS
|
|||||||
)
|
)
|
||||||
ORDER BY A_SOURCE_KEY, A_SOURCE_FILE_CONFIG_KEY
|
ORDER BY A_SOURCE_KEY, A_SOURCE_FILE_CONFIG_KEY
|
||||||
) LOOP
|
) LOOP
|
||||||
IF config_rec.ARCHIVE_ENABLED = 'N' THEN
|
IF config_rec.IS_ARCHIVE_ENABLED = 'N' THEN
|
||||||
CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT(
|
CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT(
|
||||||
'Skipping table ' || config_rec.TABLE_ID || ' (ARCHIVE_ENABLED=N) [Source: ' || config_rec.A_SOURCE_KEY || ', Config: ' || config_rec.A_SOURCE_FILE_CONFIG_KEY || ']',
|
'Skipping table ' || config_rec.TABLE_ID || ' (IS_ARCHIVE_ENABLED=N) [Source: ' || config_rec.A_SOURCE_KEY || ', Config: ' || config_rec.A_SOURCE_FILE_CONFIG_KEY || ']',
|
||||||
'INFO'
|
'INFO'
|
||||||
);
|
);
|
||||||
vTablesSkipped := vTablesSkipped + 1;
|
vTablesSkipped := vTablesSkipped + 1;
|
||||||
ELSE
|
ELSE
|
||||||
BEGIN
|
BEGIN
|
||||||
CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT(
|
CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT(
|
||||||
'Archiving table ' || config_rec.TABLE_ID || ' [Source: ' || config_rec.A_SOURCE_KEY || ', Config: ' || config_rec.A_SOURCE_FILE_CONFIG_KEY || ', KEEP_IN_TRASH=' || config_rec.KEEP_IN_TRASH || ']',
|
'Archiving table ' || config_rec.TABLE_ID || ' [Source: ' || config_rec.A_SOURCE_KEY || ', Config: ' || config_rec.A_SOURCE_FILE_CONFIG_KEY || ', IS_KEEP_IN_TRASH=' || config_rec.IS_KEEP_IN_TRASH || ']',
|
||||||
'INFO'
|
'INFO'
|
||||||
);
|
);
|
||||||
|
|
||||||
@@ -1174,14 +1175,14 @@ AS
|
|||||||
END IF;
|
END IF;
|
||||||
|
|
||||||
-- Set enabled filter info
|
-- Set enabled filter info
|
||||||
vEnabledFilter := CASE WHEN pOnlyEnabled THEN 'ARCHIVE_ENABLED=Y only' ELSE 'All tables' END;
|
vEnabledFilter := CASE WHEN pOnlyEnabled THEN 'IS_ARCHIVE_ENABLED=Y only' ELSE 'All tables' END;
|
||||||
CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT('Filter mode: ' || vEnabledFilter, 'INFO');
|
CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT('Filter mode: ' || vEnabledFilter, 'INFO');
|
||||||
|
|
||||||
FOR config_rec IN (
|
FOR config_rec IN (
|
||||||
SELECT
|
SELECT
|
||||||
A_SOURCE_FILE_CONFIG_KEY,
|
A_SOURCE_FILE_CONFIG_KEY,
|
||||||
TABLE_ID,
|
TABLE_ID,
|
||||||
ARCHIVE_ENABLED,
|
IS_ARCHIVE_ENABLED,
|
||||||
A_SOURCE_KEY
|
A_SOURCE_KEY
|
||||||
FROM CT_MRDS.A_SOURCE_FILE_CONFIG
|
FROM CT_MRDS.A_SOURCE_FILE_CONFIG
|
||||||
WHERE SOURCE_FILE_TYPE = 'INPUT'
|
WHERE SOURCE_FILE_TYPE = 'INPUT'
|
||||||
@@ -1195,20 +1196,20 @@ AS
|
|||||||
-- Level 3: All configs when pGatherAll = TRUE
|
-- Level 3: All configs when pGatherAll = TRUE
|
||||||
(pSourceFileConfigKey IS NULL AND pSourceKey IS NULL AND pGatherAll = TRUE)
|
(pSourceFileConfigKey IS NULL AND pSourceKey IS NULL AND pGatherAll = TRUE)
|
||||||
)
|
)
|
||||||
-- Apply ARCHIVE_ENABLED filter if pOnlyEnabled = TRUE
|
-- Apply IS_ARCHIVE_ENABLED filter if pOnlyEnabled = TRUE
|
||||||
AND (pOnlyEnabled = FALSE OR ARCHIVE_ENABLED = 'Y')
|
AND (pOnlyEnabled = FALSE OR IS_ARCHIVE_ENABLED = 'Y')
|
||||||
ORDER BY A_SOURCE_KEY, A_SOURCE_FILE_CONFIG_KEY
|
ORDER BY A_SOURCE_KEY, A_SOURCE_FILE_CONFIG_KEY
|
||||||
) LOOP
|
) LOOP
|
||||||
IF pOnlyEnabled AND config_rec.ARCHIVE_ENABLED = 'N' THEN
|
IF pOnlyEnabled AND config_rec.IS_ARCHIVE_ENABLED = 'N' THEN
|
||||||
CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT(
|
CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT(
|
||||||
'Skipping table ' || config_rec.TABLE_ID || ' (ARCHIVE_ENABLED=N) [Source: ' || config_rec.A_SOURCE_KEY || ', Config: ' || config_rec.A_SOURCE_FILE_CONFIG_KEY || ']',
|
'Skipping table ' || config_rec.TABLE_ID || ' (IS_ARCHIVE_ENABLED=N) [Source: ' || config_rec.A_SOURCE_KEY || ', Config: ' || config_rec.A_SOURCE_FILE_CONFIG_KEY || ']',
|
||||||
'INFO'
|
'INFO'
|
||||||
);
|
);
|
||||||
vTablesSkipped := vTablesSkipped + 1;
|
vTablesSkipped := vTablesSkipped + 1;
|
||||||
ELSE
|
ELSE
|
||||||
BEGIN
|
BEGIN
|
||||||
CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT(
|
CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT(
|
||||||
'Gathering statistics for table ' || config_rec.TABLE_ID || ' [Source: ' || config_rec.A_SOURCE_KEY || ', Config: ' || config_rec.A_SOURCE_FILE_CONFIG_KEY || ', ARCHIVE_ENABLED=' || config_rec.ARCHIVE_ENABLED || ']',
|
'Gathering statistics for table ' || config_rec.TABLE_ID || ' [Source: ' || config_rec.A_SOURCE_KEY || ', Config: ' || config_rec.A_SOURCE_FILE_CONFIG_KEY || ', IS_ARCHIVE_ENABLED=' || config_rec.IS_ARCHIVE_ENABLED || ']',
|
||||||
'INFO'
|
'INFO'
|
||||||
);
|
);
|
||||||
|
|
||||||
|
|||||||
@@ -23,7 +23,7 @@ AS
|
|||||||
|
|
||||||
-- Version History (Latest changes first)
|
-- Version History (Latest changes first)
|
||||||
VERSION_HISTORY CONSTANT VARCHAR2(4000) :=
|
VERSION_HISTORY CONSTANT VARCHAR2(4000) :=
|
||||||
'3.3.0 (2026-02-11): Added ARCHIVE_ENABLED and KEEP_IN_TRASH columns to A_SOURCE_FILE_CONFIG for selective archiving and config-based TRASH policy. Removed pKeepInTrash parameter (now from config). Added ARCHIVE_ALL batch procedure with 3-level granularity (config/source/all). Added GATHER_TABLE_STAT_ALL batch statistics procedure with 3-level granularity. Added RESTORE_FILE_FROM_TRASH and PURGE_TRASH_FOLDER with 3-level granularity' || CHR(13)||CHR(10) ||
|
'3.3.0 (2026-02-11): Added IS_ARCHIVE_ENABLED and IS_KEEP_IN_TRASH columns to A_SOURCE_FILE_CONFIG for selective archiving and config-based TRASH policy. Removed pKeepInTrash parameter (now from config). Added ARCHIVE_ALL batch procedure with 3-level granularity (config/source/all). Added GATHER_TABLE_STAT_ALL batch statistics procedure with 3-level granularity. Added RESTORE_FILE_FROM_TRASH and PURGE_TRASH_FOLDER with 3-level granularity' || CHR(13)||CHR(10) ||
|
||||||
'3.2.1 (2026-02-10): Fixed status update - ARCHIVED → ARCHIVED_AND_TRASHED when moving files to TRASH folder (critical bug fix)' || CHR(13)||CHR(10) ||
|
'3.2.1 (2026-02-10): Fixed status update - ARCHIVED → ARCHIVED_AND_TRASHED when moving files to TRASH folder (critical bug fix)' || CHR(13)||CHR(10) ||
|
||||||
'3.2.0 (2026-02-06): Added pKeepInTrash parameter (DEFAULT TRUE) to ARCHIVE_TABLE_DATA for TRASH folder retention control - files kept in TRASH subfolder (DATA bucket) by default for safety and compliance' || CHR(13)||CHR(10) ||
|
'3.2.0 (2026-02-06): Added pKeepInTrash parameter (DEFAULT TRUE) to ARCHIVE_TABLE_DATA for TRASH folder retention control - files kept in TRASH subfolder (DATA bucket) by default for safety and compliance' || CHR(13)||CHR(10) ||
|
||||||
'3.1.2 (2026-02-06): Fixed missing PARTITION_YEAR/PARTITION_MONTH assignments in UPDATE statement and export query circular dependency (now filters by workflow_start instead of partition fields)' || CHR(13)||CHR(10) ||
|
'3.1.2 (2026-02-06): Fixed missing PARTITION_YEAR/PARTITION_MONTH assignments in UPDATE statement and export query circular dependency (now filters by workflow_start instead of partition fields)' || CHR(13)||CHR(10) ||
|
||||||
@@ -51,7 +51,7 @@ AS
|
|||||||
* @desc Wrapper procedure for DBMS_CLOUD.EXPORT_DATA.
|
* @desc Wrapper procedure for DBMS_CLOUD.EXPORT_DATA.
|
||||||
* Exports data from table specified by pSourceFileConfigKey(A_SOURCE_FILE_CONFIG.A_SOURCE_FILE_CONFIG_KEY) into PARQUET file on OCI infrustructure.
|
* Exports data from table specified by pSourceFileConfigKey(A_SOURCE_FILE_CONFIG.A_SOURCE_FILE_CONFIG_KEY) into PARQUET file on OCI infrustructure.
|
||||||
* Each YEAR_MONTH pair goes to seperate file (implicit partitioning).
|
* Each YEAR_MONTH pair goes to seperate file (implicit partitioning).
|
||||||
* TRASH policy is controlled by A_SOURCE_FILE_CONFIG.KEEP_IN_TRASH column ('Y'=keep in TRASH, 'N'=delete immediately).
|
* TRASH policy is controlled by A_SOURCE_FILE_CONFIG.IS_KEEP_IN_TRASH column ('Y'=keep in TRASH, 'N'=delete immediately).
|
||||||
**/
|
**/
|
||||||
PROCEDURE ARCHIVE_TABLE_DATA (
|
PROCEDURE ARCHIVE_TABLE_DATA (
|
||||||
pSourceFileConfigKey IN CT_MRDS.A_SOURCE_FILE_CONFIG.A_SOURCE_FILE_CONFIG_KEY%TYPE
|
pSourceFileConfigKey IN CT_MRDS.A_SOURCE_FILE_CONFIG.A_SOURCE_FILE_CONFIG_KEY%TYPE
|
||||||
@@ -62,7 +62,7 @@ AS
|
|||||||
* @desc Function wrapper for ARCHIVE_TABLE_DATA procedure.
|
* @desc Function wrapper for ARCHIVE_TABLE_DATA procedure.
|
||||||
* Returns SQLCODE for Python library integration.
|
* Returns SQLCODE for Python library integration.
|
||||||
* Calls the main ARCHIVE_TABLE_DATA procedure and captures execution result.
|
* Calls the main ARCHIVE_TABLE_DATA procedure and captures execution result.
|
||||||
* TRASH policy is controlled by A_SOURCE_FILE_CONFIG.KEEP_IN_TRASH column ('Y'=keep in TRASH, 'N'=delete immediately).
|
* TRASH policy is controlled by A_SOURCE_FILE_CONFIG.IS_KEEP_IN_TRASH column ('Y'=keep in TRASH, 'N'=delete immediately).
|
||||||
* @example SELECT FILE_ARCHIVER.FN_ARCHIVE_TABLE_DATA(pSourceFileConfigKey => 123) FROM DUAL;
|
* @example SELECT FILE_ARCHIVER.FN_ARCHIVE_TABLE_DATA(pSourceFileConfigKey => 123) FROM DUAL;
|
||||||
* @ex_rslt 0 (success) or error code
|
* @ex_rslt 0 (success) or error code
|
||||||
**/
|
**/
|
||||||
@@ -96,16 +96,16 @@ AS
|
|||||||
/**
|
/**
|
||||||
* @name GATHER_TABLE_STAT_ALL
|
* @name GATHER_TABLE_STAT_ALL
|
||||||
* @desc Multi-level batch statistics gathering procedure with three granularity levels.
|
* @desc Multi-level batch statistics gathering procedure with three granularity levels.
|
||||||
* Processes configurations based on ARCHIVE_ENABLED setting (when pOnlyEnabled=TRUE).
|
* Processes configurations based on IS_ARCHIVE_ENABLED setting (when pOnlyEnabled=TRUE).
|
||||||
* Gathers statistics for external tables and inserts data into A_TABLE_STAT and A_TABLE_STAT_HIST.
|
* Gathers statistics for external tables and inserts data into A_TABLE_STAT and A_TABLE_STAT_HIST.
|
||||||
* @param pSourceFileConfigKey - (LEVEL 1) Gather stats for specific configuration key (highest priority)
|
* @param pSourceFileConfigKey - (LEVEL 1) Gather stats for specific configuration key (highest priority)
|
||||||
* @param pSourceKey - (LEVEL 2) Gather stats for all tables in source system (e.g., 'LM', 'C2D') (medium priority)
|
* @param pSourceKey - (LEVEL 2) Gather stats for all tables in source system (e.g., 'LM', 'C2D') (medium priority)
|
||||||
* @param pGatherAll - (LEVEL 3) When TRUE, gather stats for ALL tables across all sources (lowest priority)
|
* @param pGatherAll - (LEVEL 3) When TRUE, gather stats for ALL tables across all sources (lowest priority)
|
||||||
* @param pOnlyEnabled - When TRUE (default), only process tables with ARCHIVE_ENABLED='Y'
|
* @param pOnlyEnabled - When TRUE (default), only process tables with IS_ARCHIVE_ENABLED='Y'
|
||||||
* @example -- Level 1: CALL FILE_ARCHIVER.GATHER_TABLE_STAT_ALL(pSourceFileConfigKey => 123);
|
* @example -- Level 1: CALL FILE_ARCHIVER.GATHER_TABLE_STAT_ALL(pSourceFileConfigKey => 123);
|
||||||
* @example -- Level 2: CALL FILE_ARCHIVER.GATHER_TABLE_STAT_ALL(pSourceKey => 'LM');
|
* @example -- Level 2: CALL FILE_ARCHIVER.GATHER_TABLE_STAT_ALL(pSourceKey => 'LM');
|
||||||
* @example -- Level 3: CALL FILE_ARCHIVER.GATHER_TABLE_STAT_ALL(pGatherAll => TRUE);
|
* @example -- Level 3: CALL FILE_ARCHIVER.GATHER_TABLE_STAT_ALL(pGatherAll => TRUE);
|
||||||
* @example -- All tables regardless of ARCHIVE_ENABLED: CALL FILE_ARCHIVER.GATHER_TABLE_STAT_ALL(pGatherAll => TRUE, pOnlyEnabled => FALSE);
|
* @example -- All tables regardless of IS_ARCHIVE_ENABLED: CALL FILE_ARCHIVER.GATHER_TABLE_STAT_ALL(pGatherAll => TRUE, pOnlyEnabled => FALSE);
|
||||||
**/
|
**/
|
||||||
PROCEDURE GATHER_TABLE_STAT_ALL (
|
PROCEDURE GATHER_TABLE_STAT_ALL (
|
||||||
pSourceFileConfigKey IN CT_MRDS.A_SOURCE_FILE_CONFIG.A_SOURCE_FILE_CONFIG_KEY%TYPE DEFAULT NULL,
|
pSourceFileConfigKey IN CT_MRDS.A_SOURCE_FILE_CONFIG.A_SOURCE_FILE_CONFIG_KEY%TYPE DEFAULT NULL,
|
||||||
@@ -122,7 +122,7 @@ AS
|
|||||||
* @param pSourceFileConfigKey - (LEVEL 1) Gather stats for specific configuration key (highest priority)
|
* @param pSourceFileConfigKey - (LEVEL 1) Gather stats for specific configuration key (highest priority)
|
||||||
* @param pSourceKey - (LEVEL 2) Gather stats for all tables in source system (medium priority)
|
* @param pSourceKey - (LEVEL 2) Gather stats for all tables in source system (medium priority)
|
||||||
* @param pGatherAll - (LEVEL 3) When TRUE, gather stats for ALL tables across all sources (lowest priority)
|
* @param pGatherAll - (LEVEL 3) When TRUE, gather stats for ALL tables across all sources (lowest priority)
|
||||||
* @param pOnlyEnabled - When TRUE (default), only process tables with ARCHIVE_ENABLED='Y'
|
* @param pOnlyEnabled - When TRUE (default), only process tables with IS_ARCHIVE_ENABLED='Y'
|
||||||
* @example SELECT FILE_ARCHIVER.FN_GATHER_TABLE_STAT_ALL(pSourceKey => 'LM') FROM DUAL;
|
* @example SELECT FILE_ARCHIVER.FN_GATHER_TABLE_STAT_ALL(pSourceKey => 'LM') FROM DUAL;
|
||||||
* @ex_rslt 0 (success) or error code
|
* @ex_rslt 0 (success) or error code
|
||||||
**/
|
**/
|
||||||
@@ -136,8 +136,8 @@ AS
|
|||||||
/**
|
/**
|
||||||
* @name ARCHIVE_ALL
|
* @name ARCHIVE_ALL
|
||||||
* @desc Multi-level batch archival procedure with three granularity levels.
|
* @desc Multi-level batch archival procedure with three granularity levels.
|
||||||
* Only processes configurations where ARCHIVE_ENABLED='Y'.
|
* Only processes configurations where IS_ARCHIVE_ENABLED='Y'.
|
||||||
* TRASH policy for each table is controlled by individual KEEP_IN_TRASH column.
|
* TRASH policy for each table is controlled by individual IS_KEEP_IN_TRASH column.
|
||||||
* @param pSourceFileConfigKey - (LEVEL 1) Archive specific configuration key (highest priority)
|
* @param pSourceFileConfigKey - (LEVEL 1) Archive specific configuration key (highest priority)
|
||||||
* @param pSourceKey - (LEVEL 2) Archive all enabled tables for source system (e.g., 'LM', 'C2D') (medium priority)
|
* @param pSourceKey - (LEVEL 2) Archive all enabled tables for source system (e.g., 'LM', 'C2D') (medium priority)
|
||||||
* @param pArchiveAll - (LEVEL 3) When TRUE, archive ALL enabled tables across all sources (lowest priority)
|
* @param pArchiveAll - (LEVEL 3) When TRUE, archive ALL enabled tables across all sources (lowest priority)
|
||||||
|
|||||||
2009
MARS_Packages/REL01_ADDITIONS/MARS-828/new_version/FILE_MANAGER.pkb
Normal file
2009
MARS_Packages/REL01_ADDITIONS/MARS-828/new_version/FILE_MANAGER.pkb
Normal file
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,639 @@
|
|||||||
|
create or replace PACKAGE CT_MRDS.FILE_MANAGER
|
||||||
|
AUTHID CURRENT_USER
|
||||||
|
AS
|
||||||
|
/**
|
||||||
|
* General comment for package: Please put comments for functions and procedures as shown in below example.
|
||||||
|
* It is a standard.
|
||||||
|
* The structure of comment is used by GET_PACKAGE_DOCUMENTATION function
|
||||||
|
* which returns documentation text for confluence page (to Copy-Paste it).
|
||||||
|
**/
|
||||||
|
|
||||||
|
-- Example comment:
|
||||||
|
/**
|
||||||
|
* @name EX_PROCEDURE_NAME
|
||||||
|
* @desc Procedure description
|
||||||
|
* @example select FILE_MANAGER.EX_PROCEDURE_NAME(pParameter => 129) from dual;
|
||||||
|
* @ex_rslt Example Result
|
||||||
|
**/
|
||||||
|
|
||||||
|
-- Package Version Information (Semantic Versioning: MAJOR.MINOR.PATCH)
|
||||||
|
PACKAGE_VERSION CONSTANT VARCHAR2(10) := '3.5.1';
|
||||||
|
PACKAGE_BUILD_DATE CONSTANT VARCHAR2(20) := '2026-02-24 13:35:00';
|
||||||
|
PACKAGE_AUTHOR CONSTANT VARCHAR2(100) := 'Grzegorz Michalski';
|
||||||
|
|
||||||
|
-- Version History (Latest changes first)
|
||||||
|
VERSION_HISTORY CONSTANT VARCHAR2(4000) :=
|
||||||
|
'3.5.1 (2026-02-24): Fixed TIMESTAMP field syntax in GENERATE_EXTERNAL_TABLE_PARAMS for SQL*Loader compatibility (CHAR(35) DATE_FORMAT TIMESTAMP MASK format)' || CHR(13)||CHR(10) ||
|
||||||
|
'3.3.2 (2026-02-20): MARS-828 - Fixed threshold column names in GET_DET_SOURCE_FILE_CONFIG_INFO for MARS-828 compatibility' || CHR(13)||CHR(10) ||
|
||||||
|
'3.3.1 (2025-11-27): MARS-1046 - Fixed ISO 8601 datetime format parsing with milliseconds and timezone (e.g., 2012-03-02T14:16:23.798+01:00)' || CHR(13)||CHR(10) ||
|
||||||
|
'3.3.0 (2025-11-26): MARS-1056 - Fixed VARCHAR2 definitions in GENERATE_EXTERNAL_TABLE_PARAMS to preserve CHAR/BYTE semantics from template tables' || CHR(13)||CHR(10) ||
|
||||||
|
'3.2.1 (2025-11-24): MARS-1049 - Added pEncoding parameter support for CSV character set specification' || CHR(13)||CHR(10) ||
|
||||||
|
'3.2.0 (2025-10-22): Added package versioning system using centralized ENV_MANAGER functions' || CHR(13)||CHR(10) ||
|
||||||
|
'3.1.0 (2025-10-20): Enhanced PROCESS_SOURCE_FILE with 6-step validation workflow' || CHR(13)||CHR(10) ||
|
||||||
|
'3.0.0 (2025-10-15): Separated export procedures into dedicated DATA_EXPORTER package' || CHR(13)||CHR(10) ||
|
||||||
|
'2.5.0 (2025-10-10): Added DELETE_SOURCE_CASCADE for safe configuration removal' || CHR(13)||CHR(10) ||
|
||||||
|
'2.0.0 (2025-09-25): Added official path patterns support (INBOX 3-level, ODS 2-level, ARCHIVE 2-level)' || CHR(13)||CHR(10) ||
|
||||||
|
'1.0.0 (2025-09-01): Initial release with file processing and validation capabilities';
|
||||||
|
|
||||||
|
TYPE tSourceFileReceived IS RECORD
|
||||||
|
(
|
||||||
|
A_SOURCE_FILE_RECEIVED_KEY CT_MRDS.A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY%TYPE,
|
||||||
|
A_SOURCE_FILE_CONFIG_KEY CT_MRDS.A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_CONFIG_KEY%TYPE,
|
||||||
|
SOURCE_FILE_PREFIX_INBOX VARCHAR2(430),
|
||||||
|
SOURCE_FILE_PREFIX_ODS VARCHAR2(430),
|
||||||
|
SOURCE_FILE_PREFIX_QUARANTINE VARCHAR2(430),
|
||||||
|
SOURCE_FILE_PREFIX_ARCHIVE VARCHAR2(430),
|
||||||
|
SOURCE_FILE_NAME CT_MRDS.A_SOURCE_FILE_RECEIVED.SOURCE_FILE_NAME%TYPE,
|
||||||
|
RECEPTION_DATE CT_MRDS.A_SOURCE_FILE_RECEIVED.RECEPTION_DATE%TYPE,
|
||||||
|
PROCESSING_STATUS CT_MRDS.A_SOURCE_FILE_RECEIVED.PROCESSING_STATUS%TYPE,
|
||||||
|
EXTERNAL_TABLE_NAME CT_MRDS.A_SOURCE_FILE_RECEIVED.EXTERNAL_TABLE_NAME%TYPE
|
||||||
|
);
|
||||||
|
|
||||||
|
cgBL CONSTANT VARCHAR2(2) := CHR(13)||CHR(10);
|
||||||
|
vgSourceFileConfigKey PLS_INTEGER;
|
||||||
|
vgMsgTmp VARCHAR2(32000);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_SOURCE_FILE_CONFIG
|
||||||
|
* @desc Get source file type by matching the source file name against source file type naming patterns
|
||||||
|
* or by specifying the id of a received source file.
|
||||||
|
* @example ...
|
||||||
|
* @ex_rslt "CT_MRDS.A_SOURCE_FILE_CONFIG%ROWTYPE"
|
||||||
|
**/
|
||||||
|
FUNCTION GET_SOURCE_FILE_CONFIG(pFileUri IN VARCHAR2 DEFAULT NULL
|
||||||
|
, pSourceFileReceivedKey IN NUMBER DEFAULT NULL
|
||||||
|
, pSourceFileConfigKey IN NUMBER DEFAULT NULL)
|
||||||
|
RETURN CT_MRDS.A_SOURCE_FILE_CONFIG%ROWTYPE;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name REGISTER_SOURCE_FILE_RECEIVED
|
||||||
|
* @desc Register a newly received source file in A_SOURCE_FILE_RECEIVED table.
|
||||||
|
* This overload automatically determines source file type from the file name.
|
||||||
|
* It returns the value of A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY column for newly added record.
|
||||||
|
* @example vSourceFileReceivedKey := FILE_MANAGER.REGISTER_SOURCE_FILE_RECEIVED(pSourceFileReceivedName => 'INBOX/C2D/UC_DISSEM/UC_NMA_DISSEM/UC_NMA_DISSEM-277740.csv');
|
||||||
|
* @ex_rslt 3245
|
||||||
|
**/
|
||||||
|
FUNCTION REGISTER_SOURCE_FILE_RECEIVED (
|
||||||
|
pSourceFileReceivedName IN VARCHAR2
|
||||||
|
)
|
||||||
|
RETURN PLS_INTEGER;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name REGISTER_SOURCE_FILE_RECEIVED
|
||||||
|
* @desc Register a new new source file in A_SOURCE_FILE_RECEIVED table based on pSourceFileReceivedName and pSourceFileConfig.
|
||||||
|
* Then it returns the value of A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY column for newly added record.
|
||||||
|
* @example vSourceFileReceivedKey := FILE_MANAGER.REGISTER_SOURCE_FILE_RECEIVED(
|
||||||
|
* pSourceFileReceivedName => 'INBOX/C2D/UC_DISSEM/UC_NMA_DISSEM/UC_NMA_DISSEM-277740.csv'
|
||||||
|
* ,pSourceFileConfig => ...A_SOURCE_FILE_CONFIG%ROWTYPE... );
|
||||||
|
* @ex_rslt 3245
|
||||||
|
**/
|
||||||
|
FUNCTION REGISTER_SOURCE_FILE_RECEIVED (
|
||||||
|
pSourceFileReceivedName IN VARCHAR2,
|
||||||
|
pSourceFileConfig IN CT_MRDS.A_SOURCE_FILE_CONFIG%ROWTYPE
|
||||||
|
)
|
||||||
|
RETURN PLS_INTEGER;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name SET_SOURCE_FILE_RECEIVED_STATUS
|
||||||
|
* @desc Set status of file in A_SOURCE_FILE_RECEIVED table - PROCESSING_STATUS column
|
||||||
|
* based on A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY
|
||||||
|
* and provided value of pStatus parameter
|
||||||
|
* @example exec FILE_MANAGER.SET_SOURCE_FILE_RECEIVED_STATUS(pSourceFileReceivedKey => 377, pStatus => 'READY_FOR_INGESTION');
|
||||||
|
**/
|
||||||
|
PROCEDURE SET_SOURCE_FILE_RECEIVED_STATUS(
|
||||||
|
pSourceFileReceivedKey IN PLS_INTEGER,
|
||||||
|
pStatus IN VARCHAR2
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_EXTERNAL_TABLE_COLUMNS
|
||||||
|
* @desc Function used to get string with all table columns definitions based on pTargetTableTemplate "TEMPLATE TABLE" name.
|
||||||
|
* It used for creating "EXTERNAL TABLE" using CREATE_EXTERNAL_TABLE procedure.
|
||||||
|
* @example select FILE_MANAGER.GET_EXTERNAL_TABLE_COLUMNS(pTargetTableTemplate => 'CT_ET_TEMPLATES.LM_STANDING_FACILITIES_HEADER') from dual;
|
||||||
|
* @ex_rslt "A_KEY" NUMBER(38,0) NOT NULL ENABLE,
|
||||||
|
* "A_WORKFLOW_HISTORY_KEY" NUMBER(38,0) NOT NULL ENABLE,
|
||||||
|
* "REV_NUMBER" NUMBER(28,0),
|
||||||
|
* "REF_DATE" DATE,
|
||||||
|
* "FREE_TEXT" VARCHAR2(1000 CHAR),
|
||||||
|
* "MLF_BS_TOTAL" NUMBER(28,10),
|
||||||
|
* "DF_BS_TOTAL" NUMBER(28,10),
|
||||||
|
* "MLF_SF_TOTAL" NUMBER(28,10),
|
||||||
|
* "DF_SF_TOTAL" NUMBER(28,10)
|
||||||
|
**/
|
||||||
|
FUNCTION GET_EXTERNAL_TABLE_COLUMNS (
|
||||||
|
pTargetTableTemplate IN VARCHAR2
|
||||||
|
)
|
||||||
|
RETURN CLOB;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name CREATE_EXTERNAL_TABLE
|
||||||
|
* @desc A wrapper procedure for DBMS_CLOUD.CREATE_EXTERNAL_TABLE which creates External Table
|
||||||
|
* MARS-1049: Added pEncoding parameter for CSV character set specification
|
||||||
|
* @param pEncoding - Character set encoding for CSV files (e.g., 'UTF8', 'WE8MSWIN1252')
|
||||||
|
* If provided, adds CHARACTERSET clause to external table definition
|
||||||
|
* @example
|
||||||
|
* begin
|
||||||
|
* FILE_MANAGER.CREATE_EXTERNAL_TABLE(
|
||||||
|
* pTableName => 'STANDING_FACILITIES_HEADER',
|
||||||
|
* pTemplateTableName => 'CT_ET_TEMPLATES.LM_STANDING_FACILITIES_HEADER',
|
||||||
|
* pPrefix => 'ODS/LM/STANDING_FACILITIES_HEADER/',
|
||||||
|
* pBucketUri => 'https://objectstorage.eu-frankfurt-1.oraclecloud.com/n/frcnomajoc7v/b/mrds_data_tst/o/',
|
||||||
|
* pFileName => NULL,
|
||||||
|
* pDelimiter => ',',
|
||||||
|
* pEncoding => 'UTF8'
|
||||||
|
* );
|
||||||
|
* end;
|
||||||
|
**/
|
||||||
|
PROCEDURE CREATE_EXTERNAL_TABLE (
|
||||||
|
pTableName IN VARCHAR2,
|
||||||
|
pTemplateTableName IN VARCHAR2,
|
||||||
|
pPrefix IN VARCHAR2,
|
||||||
|
pBucketUri IN VARCHAR2 DEFAULT ENV_MANAGER.gvInboxBucketUri,
|
||||||
|
pFileName IN VARCHAR2 DEFAULT NULL,
|
||||||
|
pDelimiter IN VARCHAR2 DEFAULT ',',
|
||||||
|
pEncoding IN VARCHAR2 DEFAULT NULL -- MARS-1049: NOWY PARAMETR
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name CREATE_EXTERNAL_TABLE
|
||||||
|
* @desc Creates External Table for single file provided by
|
||||||
|
* pSourceFileReceivedKey parameter (A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY)
|
||||||
|
* @example exec FILE_MANAGER.CREATE_EXTERNAL_TABLE(pSourceFileReceivedKey => 377);;
|
||||||
|
**/
|
||||||
|
PROCEDURE CREATE_EXTERNAL_TABLE (
|
||||||
|
pSourceFileReceivedKey IN NUMBER
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name VALIDATE_SOURCE_FILE_RECEIVED
|
||||||
|
* @desc A wrapper procedure for DBMS_CLOUD.VALIDATE_EXTERNAL_TABLE
|
||||||
|
* It validate External table build upon single file
|
||||||
|
* provided by pSourceFileReceivedKey parameter (A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY)
|
||||||
|
* @example exec FILE_MANAGER.VALIDATE_SOURCE_FILE_RECEIVED(pSourceFileReceivedKey => 377);
|
||||||
|
**/
|
||||||
|
PROCEDURE VALIDATE_SOURCE_FILE_RECEIVED
|
||||||
|
(
|
||||||
|
pSourceFileReceivedKey IN NUMBER
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name VALIDATE_EXTERNAL_TABLE
|
||||||
|
* @desc A wrapper function for DBMS_CLOUD.VALIDATE_EXTERNAL_TABLE.
|
||||||
|
* It validates External Table provided by parameter pTableName.
|
||||||
|
* It returns: PASSED or FAILED.
|
||||||
|
* @example
|
||||||
|
* declare
|
||||||
|
* vStatus VARCHAR2(100);
|
||||||
|
* begin
|
||||||
|
* vStatus := FILE_MANAGER.VALIDATE_EXTERNAL_TABLE(pTableName => 'STANDING_FACILITIES_HEADER');
|
||||||
|
* DBMS_OUTPUT.PUT_LINE('vStatus = '||vStatus);
|
||||||
|
* end;
|
||||||
|
*
|
||||||
|
* @ex_rslt FAILED
|
||||||
|
**/
|
||||||
|
FUNCTION VALIDATE_EXTERNAL_TABLE(pTableName IN VARCHAR2)
|
||||||
|
RETURN VARCHAR2;
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name S_VALIDATE_EXTERNAL_TABLE
|
||||||
|
* @desc A function which checks if SELECT query reterns any rows.
|
||||||
|
* It trys to selects External Table provided by parameter pTableName.
|
||||||
|
* It returns: PASSED or FAILED.
|
||||||
|
* @example
|
||||||
|
* declare
|
||||||
|
* vStatus VARCHAR2(100);
|
||||||
|
* begin
|
||||||
|
* vStatus := FILE_MANAGER.S_VALIDATE_EXTERNAL_TABLE(pTableName => 'STANDING_FACILITIES_HEADER');
|
||||||
|
* DBMS_OUTPUT.PUT_LINE('vStatus = '||vStatus);
|
||||||
|
* end;
|
||||||
|
*
|
||||||
|
* @ex_rslt PASSED
|
||||||
|
**/
|
||||||
|
FUNCTION S_VALIDATE_EXTERNAL_TABLE(pTableName IN VARCHAR2)
|
||||||
|
RETURN VARCHAR2;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name DROP_EXTERNAL_TABLE
|
||||||
|
* @desc It drops External Table for single file provided by
|
||||||
|
* pSourceFileReceivedKey parameter (A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY)
|
||||||
|
* @example exec FILE_MANAGER.DROP_EXTERNAL_TABLE(pSourceFileReceivedKey => 377);
|
||||||
|
**/
|
||||||
|
PROCEDURE DROP_EXTERNAL_TABLE (
|
||||||
|
pSourceFileReceivedKey IN NUMBER
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name COPY_FILE
|
||||||
|
* @desc It copies file provided by
|
||||||
|
* pSourceFileReceivedKey parameter (A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY)
|
||||||
|
* into destination provided by pDestination parameter.
|
||||||
|
* pDestination parameter allowed values are: 'ODS'
|
||||||
|
* @example exec FILE_MANAGER.COPY_FILE(pSourceFileReceivedKey => 377, pDestination => 'ODS');
|
||||||
|
**/
|
||||||
|
PROCEDURE COPY_FILE(
|
||||||
|
pSourceFileReceivedKey IN NUMBER,
|
||||||
|
pDestination IN VARCHAR2
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name MOVE_FILE
|
||||||
|
* @desc It moves file provided by
|
||||||
|
* pSourceFileReceivedKey parameter (A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY)
|
||||||
|
* into destination provided by pDestination parameter.
|
||||||
|
* pDestination parameter allowed values are: 'ODS', 'QUARANTINE'
|
||||||
|
* @example exec FILE_MANAGER.MOVE_FILE(pSourceFileReceivedKey => 377, pDestination => 'ODS');
|
||||||
|
**/
|
||||||
|
PROCEDURE MOVE_FILE(
|
||||||
|
pSourceFileReceivedKey IN NUMBER,
|
||||||
|
pDestination IN VARCHAR2
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name DELETE_FOLDER_CONTENTS
|
||||||
|
* @desc It deletes all files from specified folder in the cloud storage.
|
||||||
|
* The procedure lists all objects in the specified folder prefix and deletes them one by one.
|
||||||
|
* pBucketArea parameter specifies which bucket to use: 'INBOX', 'DATA', 'ARCHIVE'
|
||||||
|
* pFolderPrefix parameter specifies the folder path within the bucket (e.g., 'C2D/UC_DISSEM/UC_NMA_DISSEM/')
|
||||||
|
* @example exec FILE_MANAGER.DELETE_FOLDER_CONTENTS(pBucketArea => 'INBOX', pFolderPrefix => 'C2D/UC_DISSEM/UC_NMA_DISSEM/');
|
||||||
|
**/
|
||||||
|
PROCEDURE DELETE_FOLDER_CONTENTS(
|
||||||
|
pBucketArea IN VARCHAR2,
|
||||||
|
pFolderPrefix IN VARCHAR2
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name PROCESS_SOURCE_FILE
|
||||||
|
* @desc It process file provided by pSourceFileReceivedName parameter.
|
||||||
|
* Ubmrella procedure that calls:
|
||||||
|
* - REGISTER_SOURCE_FILE_RECEIVED;
|
||||||
|
* - CREATE_EXTERNAL_TABLE;
|
||||||
|
* - VALIDATE_SOURCE_FILE_RECEIVED;
|
||||||
|
* - DROP_EXTERNAL_TABLE;
|
||||||
|
* - MOVE_FILE;
|
||||||
|
* @example exec FILE_MANAGER.PROCESS_SOURCE_FILE(pSourceFileReceivedName => 'INBOX/C2D/UC_DISSEM/UC_NMA_DISSEM/UC_NMA_DISSEM-277740.csv');
|
||||||
|
**/
|
||||||
|
PROCEDURE PROCESS_SOURCE_FILE(pSourceFileReceivedName IN VARCHAR2)
|
||||||
|
;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name PROCESS_SOURCE_FILE
|
||||||
|
* @desc It process file provided by pSourceFileReceivedName parameter and return processing result value.
|
||||||
|
* It returns (success/failure) => 0 / -(value).
|
||||||
|
* Ubmrella function that calls PROCESS_SOURCE_FILE procedure.
|
||||||
|
* @example
|
||||||
|
* declare
|
||||||
|
* vResult PLS_INTEGER;
|
||||||
|
* begin
|
||||||
|
* vResult := CT_MRDS.FILE_MANAGER.PROCESS_SOURCE_FILE(PSOURCEFILERECEIVEDNAME => 'INBOX/C2D/UC_DISSEM/UC_NMA_DISSEM/UC_NMA_DISSEM-277740.csv');
|
||||||
|
* DBMS_OUTPUT.PUT_LINE('vResult = ' || vResult);
|
||||||
|
* end;
|
||||||
|
* @ex_rslt 0
|
||||||
|
* -20021
|
||||||
|
**/
|
||||||
|
FUNCTION PROCESS_SOURCE_FILE(pSourceFileReceivedName IN VARCHAR2)
|
||||||
|
RETURN PLS_INTEGER;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_DATE_FORMAT
|
||||||
|
* @desc Returns date format for specified template table name and column name.
|
||||||
|
* Date is taken from configuration A_COLUMN_DATE_FORMAT table.
|
||||||
|
* @example select FILE_MANAGER.GET_DATE_FORMAT(
|
||||||
|
* pTemplateTableName => 'STANDING_FACILITIES_HEADER',
|
||||||
|
* pColumnName => 'SNAPSHOT_DATE')
|
||||||
|
* from dual;
|
||||||
|
* @ex_rslt DD/MM/YYYY HH24:MI:SS
|
||||||
|
**/
|
||||||
|
FUNCTION GET_DATE_FORMAT(
|
||||||
|
pTemplateTableName IN VARCHAR2,
|
||||||
|
pColumnName IN VARCHAR2
|
||||||
|
) RETURN VARCHAR2;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GENERATE_EXTERNAL_TABLE_PARAMS
|
||||||
|
* @desc It builds two strings: pColumnList and pFieldList for specified Template Table name, by parameter: pTemplateTableName.
|
||||||
|
* @example
|
||||||
|
* declare
|
||||||
|
* vColumnList CLOB;
|
||||||
|
* vFieldList CLOB;
|
||||||
|
* begin
|
||||||
|
* FILE_MANAGER.GENERATE_EXTERNAL_TABLE_PARAMS (
|
||||||
|
* pTemplateTableName => 'CT_ET_TEMPLATES.LM_STANDING_FACILITIES_HEADER'
|
||||||
|
* ,pColumnList => vColumnList
|
||||||
|
* ,pFieldList => vFieldList
|
||||||
|
* );
|
||||||
|
* DBMS_OUTPUT.PUT_LINE('vColumnList = '||vColumnList);
|
||||||
|
* DBMS_OUTPUT.PUT_LINE('vFieldList = '||vFieldList);
|
||||||
|
* end;
|
||||||
|
* /
|
||||||
|
**/
|
||||||
|
PROCEDURE GENERATE_EXTERNAL_TABLE_PARAMS (
|
||||||
|
pTemplateTableName IN VARCHAR2,
|
||||||
|
pColumnList OUT CLOB,
|
||||||
|
pFieldList OUT CLOB
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name ADD_SOURCE
|
||||||
|
* @desc Insert a new record to A_SOURCE table.
|
||||||
|
* pSourceKey is a PRIMARY KEY value.
|
||||||
|
**/
|
||||||
|
PROCEDURE ADD_SOURCE (
|
||||||
|
pSourceKey IN CT_MRDS.A_SOURCE.A_SOURCE_KEY%TYPE,
|
||||||
|
pSourceName IN CT_MRDS.A_SOURCE.SOURCE_NAME%TYPE
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name DELETE_SOURCE_CASCADE
|
||||||
|
* @desc Safely deletes a SOURCE specified by pSourceKey parameter from A_SOURCE table and all dependent tables:
|
||||||
|
* - A_SOURCE_FILE_CONFIG
|
||||||
|
* - A_SOURCE_FILE_RECEIVED
|
||||||
|
* - A_COLUMN_DATE_FORMAT (only if template table is not shared with other source systems)
|
||||||
|
* The procedure checks if template tables are shared before deleting date format configurations.
|
||||||
|
* If a template table is used by multiple source systems, date formats are preserved.
|
||||||
|
* @example CALL CT_MRDS.FILE_MANAGER.DELETE_SOURCE_CASCADE(pSourceKey => 'TEST_SYS');
|
||||||
|
**/
|
||||||
|
PROCEDURE DELETE_SOURCE_CASCADE (
|
||||||
|
pSourceKey IN CT_MRDS.A_SOURCE.A_SOURCE_KEY%TYPE
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_CONTAINER_SOURCE_FILE_CONFIG_KEY
|
||||||
|
* @desc For specified parameter pSourceFileId (A_SOURCE_FILE_CONFIG.SOURCE_FILE_ID)
|
||||||
|
* it returns A_SOURCE_FILE_CONFIG.A_SOURCE_FILE_CONFIG_KEY for related CONTAINER record.
|
||||||
|
* @example select FILE_MANAGER.GET_CONTAINER_SOURCE_FILE_CONFIG_KEY(
|
||||||
|
* pSourceFileId => 'UC_DISSEM')
|
||||||
|
* from dual;
|
||||||
|
* @ex_rslt 126
|
||||||
|
**/
|
||||||
|
FUNCTION GET_CONTAINER_SOURCE_FILE_CONFIG_KEY (
|
||||||
|
pSourceFileId IN CT_MRDS.A_SOURCE_FILE_CONFIG.SOURCE_FILE_ID%TYPE
|
||||||
|
) RETURN PLS_INTEGER;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_SOURCE_FILE_CONFIG_KEY
|
||||||
|
* @desc For specified input parameters,
|
||||||
|
* it returns A_SOURCE_FILE_CONFIG.A_SOURCE_FILE_CONFIG_KEY.
|
||||||
|
* @example select FILE_MANAGER.GET_SOURCE_FILE_CONFIG_KEY (
|
||||||
|
* pSourceFileType => 'INPUT'
|
||||||
|
* ,pSourceFileId => 'UC_DISSEM'
|
||||||
|
* ,pTableId => 'UC_NMA_DISSEM')
|
||||||
|
* from dual;
|
||||||
|
* @ex_rslt 126
|
||||||
|
**/
|
||||||
|
FUNCTION GET_SOURCE_FILE_CONFIG_KEY (
|
||||||
|
pSourceFileType IN CT_MRDS.A_SOURCE_FILE_CONFIG.SOURCE_FILE_TYPE%TYPE DEFAULT 'INPUT'
|
||||||
|
,pSourceFileId IN CT_MRDS.A_SOURCE_FILE_CONFIG.SOURCE_FILE_ID%TYPE
|
||||||
|
,pTableId IN CT_MRDS.A_SOURCE_FILE_CONFIG.TABLE_ID%TYPE
|
||||||
|
) RETURN PLS_INTEGER;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name ADD_SOURCE_FILE_CONFIG
|
||||||
|
* @desc Insert a new record to A_SOURCE_FILE_CONFIG table.
|
||||||
|
* MARS-1049: Added pEncoding parameter for CSV character set specification.
|
||||||
|
* @param pEncoding - Character set encoding for CSV files (e.g., 'UTF8', 'WE8MSWIN1252', 'EE8ISO8859P2')
|
||||||
|
* If NULL, no CHARACTERSET clause is added to external table definitions
|
||||||
|
* @example CALL CT_MRDS.FILE_MANAGER.ADD_SOURCE_FILE_CONFIG(
|
||||||
|
* pSourceKey => 'C2D', pSourceFileType => 'INPUT',
|
||||||
|
* pSourceFileId => 'UC_DISSEM', pTableId => 'METADATA_LOADS',
|
||||||
|
* pTemplateTableName => 'CT_ET_TEMPLATES.C2D_A_UC_DISSEM_METADATA_LOADS',
|
||||||
|
* pEncoding => 'UTF8'
|
||||||
|
* );
|
||||||
|
**/
|
||||||
|
PROCEDURE ADD_SOURCE_FILE_CONFIG (
|
||||||
|
pSourceKey IN CT_MRDS.A_SOURCE_FILE_CONFIG.A_SOURCE_KEY%TYPE
|
||||||
|
,pSourceFileType IN CT_MRDS.A_SOURCE_FILE_CONFIG.SOURCE_FILE_TYPE%TYPE
|
||||||
|
,pSourceFileId IN CT_MRDS.A_SOURCE_FILE_CONFIG.SOURCE_FILE_ID%TYPE
|
||||||
|
,pSourceFileDesc IN CT_MRDS.A_SOURCE_FILE_CONFIG.SOURCE_FILE_DESC%TYPE
|
||||||
|
,pSourceFileNamePattern IN CT_MRDS.A_SOURCE_FILE_CONFIG.SOURCE_FILE_NAME_PATTERN%TYPE
|
||||||
|
,pTableId IN CT_MRDS.A_SOURCE_FILE_CONFIG.TABLE_ID%TYPE DEFAULT NULL
|
||||||
|
,pTemplateTableName IN CT_MRDS.A_SOURCE_FILE_CONFIG.TEMPLATE_TABLE_NAME%TYPE DEFAULT NULL
|
||||||
|
,pContainerFileKey IN CT_MRDS.A_SOURCE_FILE_CONFIG.CONTAINER_FILE_KEY%TYPE DEFAULT NULL
|
||||||
|
,pEncoding IN CT_MRDS.A_SOURCE_FILE_CONFIG.ENCODING%TYPE DEFAULT NULL -- MARS-1049: NOWY PARAMETR
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name ADD_COLUMN_DATE_FORMAT
|
||||||
|
* @desc Insert a new record to A_COLUMN_DATE_FORMAT table.
|
||||||
|
**/
|
||||||
|
PROCEDURE ADD_COLUMN_DATE_FORMAT (
|
||||||
|
pTemplateTableName IN CT_MRDS.A_COLUMN_DATE_FORMAT.TEMPLATE_TABLE_NAME%TYPE
|
||||||
|
,pColumnName IN CT_MRDS.A_COLUMN_DATE_FORMAT.COLUMN_NAME%TYPE
|
||||||
|
,pDateFormat IN CT_MRDS.A_COLUMN_DATE_FORMAT.DATE_FORMAT%TYPE
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_BUCKET_URI
|
||||||
|
* @desc Function used to get string with bucket http url.
|
||||||
|
* Possible input values for pBucketArea are: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
|
||||||
|
* @example select FILE_MANAGER.GET_BUCKET_URI(pBucketArea => 'ODS') from dual;
|
||||||
|
* @ex_rslt https://objectstorage.eu-frankfurt-1.oraclecloud.com/n/frcnomajoc7v/b/mrds_data_tst/o/
|
||||||
|
**/
|
||||||
|
FUNCTION GET_BUCKET_URI(pBucketArea VARCHAR2)
|
||||||
|
RETURN VARCHAR2;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_DET_SOURCE_FILE_CONFIG_INFO
|
||||||
|
* @desc Function returns details about A_SOURCE_FILE_CONFIG record
|
||||||
|
* for specified pSourceFileConfigKey (A_SOURCE_FILE_CONFIG.A_SOURCE_FILE_CONFIG_KEY).
|
||||||
|
* If pIncludeContainerInfo is <> 0 it returns additional info about related Container config record (A_SOURCE_FILE_CONFIG)
|
||||||
|
* If pIncludeColumnFormatInfo is <> 0 it returns additional info about related ColumnFormat config record (A_COLUMN_DATE_FORMAT)
|
||||||
|
* @example select FILE_MANAGER.GET_DET_SOURCE_FILE_CONFIG_INFO (
|
||||||
|
* pSourceFileConfigKey => 128
|
||||||
|
* ,pIncludeContainerInfo => 1
|
||||||
|
* ,pIncludeColumnFormatInfo => 1
|
||||||
|
* ) from dual;
|
||||||
|
* @ex_rslt
|
||||||
|
* Details about File Configuration:
|
||||||
|
* --------------------------------
|
||||||
|
* A_SOURCE_FILE_CONFIG_KEY = 128
|
||||||
|
* A_SOURCE_KEY = C2D
|
||||||
|
* ...
|
||||||
|
* --------------------------------
|
||||||
|
*
|
||||||
|
* Details about related Container Config:
|
||||||
|
* --------------------------------
|
||||||
|
* A_SOURCE_FILE_CONFIG_KEY = 126
|
||||||
|
* A_SOURCE_KEY = C2D
|
||||||
|
* ...
|
||||||
|
* --------------------------------
|
||||||
|
*
|
||||||
|
* Column Date Format config entries:
|
||||||
|
* --------------------------------
|
||||||
|
* TEMPLATE_TABLE_NAME = CT_ET_TEMPLATES.C2D_UC_MA_DISSEM
|
||||||
|
* ...
|
||||||
|
* --------------------------------
|
||||||
|
**/
|
||||||
|
FUNCTION GET_DET_SOURCE_FILE_CONFIG_INFO (
|
||||||
|
pSourceFileConfigKey IN CT_MRDS.A_SOURCE_FILE_CONFIG.A_SOURCE_FILE_CONFIG_KEY%TYPE
|
||||||
|
,pIncludeContainerInfo IN PLS_INTEGER DEFAULT 1
|
||||||
|
,pIncludeColumnFormatInfo IN PLS_INTEGER DEFAULT 1
|
||||||
|
) RETURN VARCHAR2;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_DET_SOURCE_FILE_RECEIVED_INFO
|
||||||
|
* @desc Function returns details about A_SOURCE_FILE_RECEIVED record
|
||||||
|
* for specified pSourceFileReceivedKey (A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY).
|
||||||
|
* If pIncludeConfigInfo is <> 0 it returns additional info about related Container config record (A_SOURCE_FILE_CONFIG)
|
||||||
|
* If pIncludeContainerInfo is <> 0 it returns additional info about related Container config record (A_SOURCE_FILE_CONFIG)
|
||||||
|
* If pIncludeColumnFormatInfo is <> 0 it returns additional info about related ColumnFormat config record (A_COLUMN_DATE_FORMAT)
|
||||||
|
* @example select FILE_MANAGER.GET_DET_SOURCE_FILE_RECEIVED_INFO (
|
||||||
|
* pSourceFileReceivedKey => 377
|
||||||
|
* ,pIncludeConfigInfo => 1
|
||||||
|
* ,pIncludeContainerInfo => 1
|
||||||
|
* ,pIncludeColumnFormatInfo => 1
|
||||||
|
* ) from dual;
|
||||||
|
*
|
||||||
|
**/
|
||||||
|
FUNCTION GET_DET_SOURCE_FILE_RECEIVED_INFO (
|
||||||
|
pSourceFileReceivedKey IN CT_MRDS.A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY%TYPE
|
||||||
|
,pIncludeConfigInfo IN PLS_INTEGER DEFAULT 1
|
||||||
|
,pIncludeContainerInfo IN PLS_INTEGER DEFAULT 1
|
||||||
|
,pIncludeColumnFormatInfo IN PLS_INTEGER DEFAULT 1
|
||||||
|
) RETURN VARCHAR2;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_DET_USER_LOAD_OPERATIONS
|
||||||
|
* @desc Function returns details from USER_LOAD_OPERATIONS table
|
||||||
|
* for specified pOperationId.
|
||||||
|
* @example select FILE_MANAGER.GET_DET_USER_LOAD_OPERATIONS (pOperationId => 3608) from dual;
|
||||||
|
* @ex_rslt
|
||||||
|
* Details about USER_LOAD_OPERATIONS where ID = 3608
|
||||||
|
* --------------------------------
|
||||||
|
* ID = 3608
|
||||||
|
* TYPE = VALIDATE
|
||||||
|
* SID = 31260
|
||||||
|
* SERIAL# = 52915
|
||||||
|
* START_TIME = 2025-05-20 10.08.24.436983 EUROPE/BELGRADE
|
||||||
|
* UPDATE_TIME = 2025-05-20 10.08.24.458643 EUROPE/BELGRADE
|
||||||
|
* STATUS = FAILED
|
||||||
|
* OWNER_NAME = CT_MRDS
|
||||||
|
* TABLE_NAME = STANDING_FACILITIES_HEADER
|
||||||
|
* PARTITION_NAME =
|
||||||
|
* SUBPARTITION_NAME =
|
||||||
|
* FILE_URI_LIST =
|
||||||
|
* ROWS_LOADED =
|
||||||
|
* LOGFILE_TABLE = VALIDATE$3608_LOG
|
||||||
|
* BADFILE_TABLE = VALIDATE$3608_BAD
|
||||||
|
* STATUS_TABLE =
|
||||||
|
* TEMPEXT_TABLE =
|
||||||
|
* CREDENTIAL_NAME =
|
||||||
|
* EXPIRATION_TIME = 2025-05-22 10.08.24.436983000 EUROPE/BELGRADE
|
||||||
|
* --------------------------------
|
||||||
|
**/
|
||||||
|
FUNCTION GET_DET_USER_LOAD_OPERATIONS (
|
||||||
|
pOperationId PLS_INTEGER
|
||||||
|
) RETURN VARCHAR2;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name ANALYZE_VALIDATION_ERRORS
|
||||||
|
* @desc Wrapper function that analyzes validation errors for a source file using its received key.
|
||||||
|
* Automatically derives template schema, table name, CSV URI and validation log table
|
||||||
|
* from file metadata and calls ENV_MANAGER.ANALYZE_VALIDATION_ERRORS.
|
||||||
|
* @example SELECT FILE_MANAGER.ANALYZE_VALIDATION_ERRORS(63) FROM DUAL;
|
||||||
|
* @ex_rslt Detailed validation analysis report with column mismatches and solutions
|
||||||
|
**/
|
||||||
|
FUNCTION ANALYZE_VALIDATION_ERRORS(
|
||||||
|
pSourceFileReceivedKey IN NUMBER
|
||||||
|
) RETURN VARCHAR2;
|
||||||
|
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
-- PACKAGE VERSION MANAGEMENT FUNCTIONS
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_VERSION
|
||||||
|
* @desc Returns the current version number of the FILE_MANAGER package.
|
||||||
|
* Uses semantic versioning format (MAJOR.MINOR.PATCH).
|
||||||
|
* @example SELECT FILE_MANAGER.GET_VERSION() FROM DUAL;
|
||||||
|
* @ex_rslt 3.2.0
|
||||||
|
**/
|
||||||
|
FUNCTION GET_VERSION RETURN VARCHAR2;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_BUILD_INFO
|
||||||
|
* @desc Returns comprehensive build information including version, build date, and author.
|
||||||
|
* Uses centralized ENV_MANAGER.GET_PACKAGE_VERSION_INFO function.
|
||||||
|
* @example SELECT FILE_MANAGER.GET_BUILD_INFO() FROM DUAL;
|
||||||
|
* @ex_rslt Package: FILE_MANAGER
|
||||||
|
* Version: 3.2.0
|
||||||
|
* Build Date: 2025-10-22 16:30:00
|
||||||
|
* Author: Grzegorz Michalski
|
||||||
|
**/
|
||||||
|
FUNCTION GET_BUILD_INFO RETURN VARCHAR2;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_VERSION_HISTORY
|
||||||
|
* @desc Returns complete version history with all releases and changes.
|
||||||
|
* Uses centralized ENV_MANAGER.FORMAT_VERSION_HISTORY function.
|
||||||
|
* @example SELECT FILE_MANAGER.GET_VERSION_HISTORY() FROM DUAL;
|
||||||
|
* @ex_rslt FILE_MANAGER Version History:
|
||||||
|
* 3.2.0 (2025-10-22): Added package versioning system...
|
||||||
|
**/
|
||||||
|
FUNCTION GET_VERSION_HISTORY RETURN VARCHAR2;
|
||||||
|
|
||||||
|
END;
|
||||||
|
|
||||||
|
/
|
||||||
|
|
||||||
|
/
|
||||||
@@ -33,10 +33,14 @@ PROMPT This will restore FILE_ARCHIVER to v2.0.0
|
|||||||
PROMPT
|
PROMPT
|
||||||
PROMPT Rollback steps:
|
PROMPT Rollback steps:
|
||||||
PROMPT 1. Rollback TRASH retention statuses
|
PROMPT 1. Rollback TRASH retention statuses
|
||||||
PROMPT 2. Remove validation trigger
|
PROMPT 2. Revoke T_FILENAME privileges
|
||||||
PROMPT 3. Drop all configuration columns (ARCHIVAL_STRATEGY, MINIMUM_AGE_MONTHS, ARCHIVE_ENABLED, KEEP_IN_TRASH)
|
PROMPT 3. Remove validation trigger
|
||||||
PROMPT 4. Restore FILE_ARCHIVER package to v2.0.0
|
PROMPT 4. Remove column comments (OPTIONAL - does not affect functionality)
|
||||||
PROMPT 5. Revert all archival strategies to THRESHOLD_BASED
|
PROMPT 5. Revert threshold column renames (restore original naming)
|
||||||
|
PROMPT 6. Drop all configuration columns (ARCHIVAL_STRATEGY, MINIMUM_AGE_MONTHS, IS_ARCHIVE_ENABLED, IS_KEEP_IN_TRASH)
|
||||||
|
PROMPT 7. Restore FILE_ARCHIVER package to v2.0.0
|
||||||
|
PROMPT 8. Restore FILE_MANAGER package to v3.3.1
|
||||||
|
PROMPT 9. Revert all archival strategies to THRESHOLD_BASED
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT Timestamp:
|
PROMPT Timestamp:
|
||||||
SELECT TO_CHAR(SYSDATE, 'YYYY-MM-DD HH24:MI:SS') AS rollback_start FROM DUAL;
|
SELECT TO_CHAR(SYSDATE, 'YYYY-MM-DD HH24:MI:SS') AS rollback_start FROM DUAL;
|
||||||
@@ -55,33 +59,61 @@ WHENEVER SQLERROR CONTINUE
|
|||||||
|
|
||||||
-- Rollback steps (in reverse order)
|
-- Rollback steps (in reverse order)
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT Step 1/6: Rolling back TRASH retention statuses
|
PROMPT Step 1/9: Rolling back TRASH retention statuses
|
||||||
PROMPT ================================================
|
PROMPT ================================================
|
||||||
@@90_MARS_828_rollback_trash_retention_statuses.sql
|
@@90_MARS_828_rollback_trash_retention_statuses.sql
|
||||||
|
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT Step 2/6: Dropping validation trigger
|
PROMPT Step 2/9: Revoking T_FILENAME privileges from MRDS_LOADER
|
||||||
|
PROMPT ==========================================================
|
||||||
|
@@95_MARS_828_rollback_grant_t_filename.sql
|
||||||
|
|
||||||
|
PROMPT
|
||||||
|
PROMPT Step 3/9: Dropping validation trigger
|
||||||
PROMPT ======================================
|
PROMPT ======================================
|
||||||
@@93_MARS_828_rollback_trigger.sql
|
@@93_MARS_828_rollback_trigger.sql
|
||||||
|
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT Step 3/6: Dropping all archival configuration columns
|
PROMPT Step 4/9 (OPTIONAL): Removing column comments
|
||||||
|
PROMPT ==============================================
|
||||||
|
PROMPT NOTE: This is optional - comments do not affect functionality
|
||||||
|
PROMPT Skipping column comments removal in standard rollback
|
||||||
|
PROMPT Execute 94b_MARS_828_rollback_column_comments.sql manually if needed
|
||||||
|
PROMPT
|
||||||
|
|
||||||
|
PROMPT
|
||||||
|
PROMPT Step 5/9: Reverting threshold column renames
|
||||||
|
PROMPT =============================================
|
||||||
|
@@94a_MARS_828_rollback_threshold_rename.sql
|
||||||
|
|
||||||
|
PROMPT
|
||||||
|
PROMPT Step 6/9: Dropping all archival configuration columns
|
||||||
PROMPT ======================================================
|
PROMPT ======================================================
|
||||||
@@94_MARS_828_rollback_columns.sql
|
@@94_MARS_828_rollback_columns.sql
|
||||||
|
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT Step 4/6: Restoring FILE_ARCHIVER Package Specification v2.0.0
|
PROMPT Step 7/9: Restoring FILE_ARCHIVER Package Specification v2.0.0
|
||||||
PROMPT ===============================================================
|
PROMPT ===============================================================
|
||||||
@@91_MARS_828_rollback_FILE_ARCHIVER_SPEC.sql
|
@@91_MARS_828_rollback_FILE_ARCHIVER_SPEC.sql
|
||||||
|
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT Step 5/6: Restoring FILE_ARCHIVER Package Body v2.0.0
|
PROMPT Step 8/11: Restoring FILE_ARCHIVER Package Body v2.0.0
|
||||||
PROMPT ======================================================
|
PROMPT =======================================================
|
||||||
@@92_MARS_828_rollback_FILE_ARCHIVER_BODY.sql
|
@@92_MARS_828_rollback_FILE_ARCHIVER_BODY.sql
|
||||||
|
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT Step 6/6: Verifying tracked packages
|
PROMPT Step 9/11: Restoring FILE_MANAGER Package Specification v3.3.1
|
||||||
PROMPT =====================================
|
PROMPT ===============================================================
|
||||||
|
@@97_MARS_828_rollback_FILE_MANAGER_SPEC.sql
|
||||||
|
|
||||||
|
PROMPT
|
||||||
|
PROMPT Step 10/11: Restoring FILE_MANAGER Package Body v3.3.1
|
||||||
|
PROMPT ======================================================
|
||||||
|
@@98_MARS_828_rollback_FILE_MANAGER_BODY.sql
|
||||||
|
|
||||||
|
PROMPT
|
||||||
|
PROMPT Step 11/11: Verifying tracked packages
|
||||||
|
PROMPT ======================================
|
||||||
@@verify_packages_version.sql
|
@@verify_packages_version.sql
|
||||||
|
|
||||||
-- Verify rollback
|
-- Verify rollback
|
||||||
@@ -91,9 +123,9 @@ PROMPT =========================================
|
|||||||
SELECT object_name, object_type, status, last_ddl_time
|
SELECT object_name, object_type, status, last_ddl_time
|
||||||
FROM all_objects
|
FROM all_objects
|
||||||
WHERE owner = 'CT_MRDS'
|
WHERE owner = 'CT_MRDS'
|
||||||
AND object_name = 'FILE_ARCHIVER'
|
AND object_name IN ('FILE_ARCHIVER', 'FILE_MANAGER')
|
||||||
AND object_type IN ('PACKAGE', 'PACKAGE BODY')
|
AND object_type IN ('PACKAGE', 'PACKAGE BODY')
|
||||||
ORDER BY object_type;
|
ORDER BY object_name, object_type;
|
||||||
|
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT ============================================================================
|
PROMPT ============================================================================
|
||||||
@@ -103,8 +135,9 @@ PROMPT Completion Time:
|
|||||||
SELECT TO_CHAR(SYSDATE, 'YYYY-MM-DD HH24:MI:SS') AS rollback_end FROM DUAL;
|
SELECT TO_CHAR(SYSDATE, 'YYYY-MM-DD HH24:MI:SS') AS rollback_end FROM DUAL;
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT Rollback Summary:
|
PROMPT Rollback Summary:
|
||||||
PROMPT - Package: CT_MRDS.FILE_ARCHIVER
|
PROMPT - Packages Rolled Back:
|
||||||
PROMPT - Restored Version: 2.0.0 (THRESHOLD_BASED archival only)
|
PROMPT * CT_MRDS.FILE_ARCHIVER to v2.0.0 (THRESHOLD_BASED archival only)
|
||||||
|
PROMPT * CT_MRDS.FILE_MANAGER to v3.3.1 (pre-MARS-828 threshold column compatibility)
|
||||||
PROMPT - Removed Features: CURRENT_MONTH_ONLY, MINIMUM_AGE_MONTHS, HYBRID strategies
|
PROMPT - Removed Features: CURRENT_MONTH_ONLY, MINIMUM_AGE_MONTHS, HYBRID strategies
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT Log file: &_filename
|
PROMPT Log file: &_filename
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,637 @@
|
|||||||
|
create or replace PACKAGE CT_MRDS.FILE_MANAGER
|
||||||
|
AUTHID CURRENT_USER
|
||||||
|
AS
|
||||||
|
/**
|
||||||
|
* General comment for package: Please put comments for functions and procedures as shown in below example.
|
||||||
|
* It is a standard.
|
||||||
|
* The structure of comment is used by GET_PACKAGE_DOCUMENTATION function
|
||||||
|
* which returns documentation text for confluence page (to Copy-Paste it).
|
||||||
|
**/
|
||||||
|
|
||||||
|
-- Example comment:
|
||||||
|
/**
|
||||||
|
* @name EX_PROCEDURE_NAME
|
||||||
|
* @desc Procedure description
|
||||||
|
* @example select FILE_MANAGER.EX_PROCEDURE_NAME(pParameter => 129) from dual;
|
||||||
|
* @ex_rslt Example Result
|
||||||
|
**/
|
||||||
|
|
||||||
|
-- Package Version Information (Semantic Versioning: MAJOR.MINOR.PATCH)
|
||||||
|
PACKAGE_VERSION CONSTANT VARCHAR2(10) := '3.3.1';
|
||||||
|
PACKAGE_BUILD_DATE CONSTANT VARCHAR2(20) := '2025-11-27 14:00:00';
|
||||||
|
PACKAGE_AUTHOR CONSTANT VARCHAR2(100) := 'Grzegorz Michalski';
|
||||||
|
|
||||||
|
-- Version History (Latest changes first)
|
||||||
|
VERSION_HISTORY CONSTANT VARCHAR2(4000) :=
|
||||||
|
'3.3.1 (2025-11-27): MARS-1046 - Fixed ISO 8601 datetime format parsing with milliseconds and timezone (e.g., 2012-03-02T14:16:23.798+01:00)' || CHR(13)||CHR(10) ||
|
||||||
|
'3.3.0 (2025-11-26): MARS-1056 - Fixed VARCHAR2 definitions in GENERATE_EXTERNAL_TABLE_PARAMS to preserve CHAR/BYTE semantics from template tables' || CHR(13)||CHR(10) ||
|
||||||
|
'3.2.1 (2025-11-24): MARS-1049 - Added pEncoding parameter support for CSV character set specification' || CHR(13)||CHR(10) ||
|
||||||
|
'3.2.0 (2025-10-22): Added package versioning system using centralized ENV_MANAGER functions' || CHR(13)||CHR(10) ||
|
||||||
|
'3.1.0 (2025-10-20): Enhanced PROCESS_SOURCE_FILE with 6-step validation workflow' || CHR(13)||CHR(10) ||
|
||||||
|
'3.0.0 (2025-10-15): Separated export procedures into dedicated DATA_EXPORTER package' || CHR(13)||CHR(10) ||
|
||||||
|
'2.5.0 (2025-10-10): Added DELETE_SOURCE_CASCADE for safe configuration removal' || CHR(13)||CHR(10) ||
|
||||||
|
'2.0.0 (2025-09-25): Added official path patterns support (INBOX 3-level, ODS 2-level, ARCHIVE 2-level)' || CHR(13)||CHR(10) ||
|
||||||
|
'1.0.0 (2025-09-01): Initial release with file processing and validation capabilities';
|
||||||
|
|
||||||
|
TYPE tSourceFileReceived IS RECORD
|
||||||
|
(
|
||||||
|
A_SOURCE_FILE_RECEIVED_KEY CT_MRDS.A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY%TYPE,
|
||||||
|
A_SOURCE_FILE_CONFIG_KEY CT_MRDS.A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_CONFIG_KEY%TYPE,
|
||||||
|
SOURCE_FILE_PREFIX_INBOX VARCHAR2(430),
|
||||||
|
SOURCE_FILE_PREFIX_ODS VARCHAR2(430),
|
||||||
|
SOURCE_FILE_PREFIX_QUARANTINE VARCHAR2(430),
|
||||||
|
SOURCE_FILE_PREFIX_ARCHIVE VARCHAR2(430),
|
||||||
|
SOURCE_FILE_NAME CT_MRDS.A_SOURCE_FILE_RECEIVED.SOURCE_FILE_NAME%TYPE,
|
||||||
|
RECEPTION_DATE CT_MRDS.A_SOURCE_FILE_RECEIVED.RECEPTION_DATE%TYPE,
|
||||||
|
PROCESSING_STATUS CT_MRDS.A_SOURCE_FILE_RECEIVED.PROCESSING_STATUS%TYPE,
|
||||||
|
EXTERNAL_TABLE_NAME CT_MRDS.A_SOURCE_FILE_RECEIVED.EXTERNAL_TABLE_NAME%TYPE
|
||||||
|
);
|
||||||
|
|
||||||
|
cgBL CONSTANT VARCHAR2(2) := CHR(13)||CHR(10);
|
||||||
|
vgSourceFileConfigKey PLS_INTEGER;
|
||||||
|
vgMsgTmp VARCHAR2(32000);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_SOURCE_FILE_CONFIG
|
||||||
|
* @desc Get source file type by matching the source file name against source file type naming patterns
|
||||||
|
* or by specifying the id of a received source file.
|
||||||
|
* @example ...
|
||||||
|
* @ex_rslt "CT_MRDS.A_SOURCE_FILE_CONFIG%ROWTYPE"
|
||||||
|
**/
|
||||||
|
FUNCTION GET_SOURCE_FILE_CONFIG(pFileUri IN VARCHAR2 DEFAULT NULL
|
||||||
|
, pSourceFileReceivedKey IN NUMBER DEFAULT NULL
|
||||||
|
, pSourceFileConfigKey IN NUMBER DEFAULT NULL)
|
||||||
|
RETURN CT_MRDS.A_SOURCE_FILE_CONFIG%ROWTYPE;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name REGISTER_SOURCE_FILE_RECEIVED
|
||||||
|
* @desc Register a newly received source file in A_SOURCE_FILE_RECEIVED table.
|
||||||
|
* This overload automatically determines source file type from the file name.
|
||||||
|
* It returns the value of A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY column for newly added record.
|
||||||
|
* @example vSourceFileReceivedKey := FILE_MANAGER.REGISTER_SOURCE_FILE_RECEIVED(pSourceFileReceivedName => 'INBOX/C2D/UC_DISSEM/UC_NMA_DISSEM/UC_NMA_DISSEM-277740.csv');
|
||||||
|
* @ex_rslt 3245
|
||||||
|
**/
|
||||||
|
FUNCTION REGISTER_SOURCE_FILE_RECEIVED (
|
||||||
|
pSourceFileReceivedName IN VARCHAR2
|
||||||
|
)
|
||||||
|
RETURN PLS_INTEGER;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name REGISTER_SOURCE_FILE_RECEIVED
|
||||||
|
* @desc Register a new new source file in A_SOURCE_FILE_RECEIVED table based on pSourceFileReceivedName and pSourceFileConfig.
|
||||||
|
* Then it returns the value of A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY column for newly added record.
|
||||||
|
* @example vSourceFileReceivedKey := FILE_MANAGER.REGISTER_SOURCE_FILE_RECEIVED(
|
||||||
|
* pSourceFileReceivedName => 'INBOX/C2D/UC_DISSEM/UC_NMA_DISSEM/UC_NMA_DISSEM-277740.csv'
|
||||||
|
* ,pSourceFileConfig => ...A_SOURCE_FILE_CONFIG%ROWTYPE... );
|
||||||
|
* @ex_rslt 3245
|
||||||
|
**/
|
||||||
|
FUNCTION REGISTER_SOURCE_FILE_RECEIVED (
|
||||||
|
pSourceFileReceivedName IN VARCHAR2,
|
||||||
|
pSourceFileConfig IN CT_MRDS.A_SOURCE_FILE_CONFIG%ROWTYPE
|
||||||
|
)
|
||||||
|
RETURN PLS_INTEGER;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name SET_SOURCE_FILE_RECEIVED_STATUS
|
||||||
|
* @desc Set status of file in A_SOURCE_FILE_RECEIVED table - PROCESSING_STATUS column
|
||||||
|
* based on A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY
|
||||||
|
* and provided value of pStatus parameter
|
||||||
|
* @example exec FILE_MANAGER.SET_SOURCE_FILE_RECEIVED_STATUS(pSourceFileReceivedKey => 377, pStatus => 'READY_FOR_INGESTION');
|
||||||
|
**/
|
||||||
|
PROCEDURE SET_SOURCE_FILE_RECEIVED_STATUS(
|
||||||
|
pSourceFileReceivedKey IN PLS_INTEGER,
|
||||||
|
pStatus IN VARCHAR2
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_EXTERNAL_TABLE_COLUMNS
|
||||||
|
* @desc Function used to get string with all table columns definitions based on pTargetTableTemplate "TEMPLATE TABLE" name.
|
||||||
|
* It used for creating "EXTERNAL TABLE" using CREATE_EXTERNAL_TABLE procedure.
|
||||||
|
* @example select FILE_MANAGER.GET_EXTERNAL_TABLE_COLUMNS(pTargetTableTemplate => 'CT_ET_TEMPLATES.LM_STANDING_FACILITIES_HEADER') from dual;
|
||||||
|
* @ex_rslt "A_KEY" NUMBER(38,0) NOT NULL ENABLE,
|
||||||
|
* "A_WORKFLOW_HISTORY_KEY" NUMBER(38,0) NOT NULL ENABLE,
|
||||||
|
* "REV_NUMBER" NUMBER(28,0),
|
||||||
|
* "REF_DATE" DATE,
|
||||||
|
* "FREE_TEXT" VARCHAR2(1000 CHAR),
|
||||||
|
* "MLF_BS_TOTAL" NUMBER(28,10),
|
||||||
|
* "DF_BS_TOTAL" NUMBER(28,10),
|
||||||
|
* "MLF_SF_TOTAL" NUMBER(28,10),
|
||||||
|
* "DF_SF_TOTAL" NUMBER(28,10)
|
||||||
|
**/
|
||||||
|
FUNCTION GET_EXTERNAL_TABLE_COLUMNS (
|
||||||
|
pTargetTableTemplate IN VARCHAR2
|
||||||
|
)
|
||||||
|
RETURN CLOB;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name CREATE_EXTERNAL_TABLE
|
||||||
|
* @desc A wrapper procedure for DBMS_CLOUD.CREATE_EXTERNAL_TABLE which creates External Table
|
||||||
|
* MARS-1049: Added pEncoding parameter for CSV character set specification
|
||||||
|
* @param pEncoding - Character set encoding for CSV files (e.g., 'UTF8', 'WE8MSWIN1252')
|
||||||
|
* If provided, adds CHARACTERSET clause to external table definition
|
||||||
|
* @example
|
||||||
|
* begin
|
||||||
|
* FILE_MANAGER.CREATE_EXTERNAL_TABLE(
|
||||||
|
* pTableName => 'STANDING_FACILITIES_HEADER',
|
||||||
|
* pTemplateTableName => 'CT_ET_TEMPLATES.LM_STANDING_FACILITIES_HEADER',
|
||||||
|
* pPrefix => 'ODS/LM/STANDING_FACILITIES_HEADER/',
|
||||||
|
* pBucketUri => 'https://objectstorage.eu-frankfurt-1.oraclecloud.com/n/frcnomajoc7v/b/mrds_data_tst/o/',
|
||||||
|
* pFileName => NULL,
|
||||||
|
* pDelimiter => ',',
|
||||||
|
* pEncoding => 'UTF8'
|
||||||
|
* );
|
||||||
|
* end;
|
||||||
|
**/
|
||||||
|
PROCEDURE CREATE_EXTERNAL_TABLE (
|
||||||
|
pTableName IN VARCHAR2,
|
||||||
|
pTemplateTableName IN VARCHAR2,
|
||||||
|
pPrefix IN VARCHAR2,
|
||||||
|
pBucketUri IN VARCHAR2 DEFAULT ENV_MANAGER.gvInboxBucketUri,
|
||||||
|
pFileName IN VARCHAR2 DEFAULT NULL,
|
||||||
|
pDelimiter IN VARCHAR2 DEFAULT ',',
|
||||||
|
pEncoding IN VARCHAR2 DEFAULT NULL -- MARS-1049: NOWY PARAMETR
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name CREATE_EXTERNAL_TABLE
|
||||||
|
* @desc Creates External Table for single file provided by
|
||||||
|
* pSourceFileReceivedKey parameter (A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY)
|
||||||
|
* @example exec FILE_MANAGER.CREATE_EXTERNAL_TABLE(pSourceFileReceivedKey => 377);;
|
||||||
|
**/
|
||||||
|
PROCEDURE CREATE_EXTERNAL_TABLE (
|
||||||
|
pSourceFileReceivedKey IN NUMBER
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name VALIDATE_SOURCE_FILE_RECEIVED
|
||||||
|
* @desc A wrapper procedure for DBMS_CLOUD.VALIDATE_EXTERNAL_TABLE
|
||||||
|
* It validate External table build upon single file
|
||||||
|
* provided by pSourceFileReceivedKey parameter (A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY)
|
||||||
|
* @example exec FILE_MANAGER.VALIDATE_SOURCE_FILE_RECEIVED(pSourceFileReceivedKey => 377);
|
||||||
|
**/
|
||||||
|
PROCEDURE VALIDATE_SOURCE_FILE_RECEIVED
|
||||||
|
(
|
||||||
|
pSourceFileReceivedKey IN NUMBER
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name VALIDATE_EXTERNAL_TABLE
|
||||||
|
* @desc A wrapper function for DBMS_CLOUD.VALIDATE_EXTERNAL_TABLE.
|
||||||
|
* It validates External Table provided by parameter pTableName.
|
||||||
|
* It returns: PASSED or FAILED.
|
||||||
|
* @example
|
||||||
|
* declare
|
||||||
|
* vStatus VARCHAR2(100);
|
||||||
|
* begin
|
||||||
|
* vStatus := FILE_MANAGER.VALIDATE_EXTERNAL_TABLE(pTableName => 'STANDING_FACILITIES_HEADER');
|
||||||
|
* DBMS_OUTPUT.PUT_LINE('vStatus = '||vStatus);
|
||||||
|
* end;
|
||||||
|
*
|
||||||
|
* @ex_rslt FAILED
|
||||||
|
**/
|
||||||
|
FUNCTION VALIDATE_EXTERNAL_TABLE(pTableName IN VARCHAR2)
|
||||||
|
RETURN VARCHAR2;
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name S_VALIDATE_EXTERNAL_TABLE
|
||||||
|
* @desc A function which checks if SELECT query reterns any rows.
|
||||||
|
* It trys to selects External Table provided by parameter pTableName.
|
||||||
|
* It returns: PASSED or FAILED.
|
||||||
|
* @example
|
||||||
|
* declare
|
||||||
|
* vStatus VARCHAR2(100);
|
||||||
|
* begin
|
||||||
|
* vStatus := FILE_MANAGER.S_VALIDATE_EXTERNAL_TABLE(pTableName => 'STANDING_FACILITIES_HEADER');
|
||||||
|
* DBMS_OUTPUT.PUT_LINE('vStatus = '||vStatus);
|
||||||
|
* end;
|
||||||
|
*
|
||||||
|
* @ex_rslt PASSED
|
||||||
|
**/
|
||||||
|
FUNCTION S_VALIDATE_EXTERNAL_TABLE(pTableName IN VARCHAR2)
|
||||||
|
RETURN VARCHAR2;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name DROP_EXTERNAL_TABLE
|
||||||
|
* @desc It drops External Table for single file provided by
|
||||||
|
* pSourceFileReceivedKey parameter (A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY)
|
||||||
|
* @example exec FILE_MANAGER.DROP_EXTERNAL_TABLE(pSourceFileReceivedKey => 377);
|
||||||
|
**/
|
||||||
|
PROCEDURE DROP_EXTERNAL_TABLE (
|
||||||
|
pSourceFileReceivedKey IN NUMBER
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name COPY_FILE
|
||||||
|
* @desc It copies file provided by
|
||||||
|
* pSourceFileReceivedKey parameter (A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY)
|
||||||
|
* into destination provided by pDestination parameter.
|
||||||
|
* pDestination parameter allowed values are: 'ODS'
|
||||||
|
* @example exec FILE_MANAGER.COPY_FILE(pSourceFileReceivedKey => 377, pDestination => 'ODS');
|
||||||
|
**/
|
||||||
|
PROCEDURE COPY_FILE(
|
||||||
|
pSourceFileReceivedKey IN NUMBER,
|
||||||
|
pDestination IN VARCHAR2
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name MOVE_FILE
|
||||||
|
* @desc It moves file provided by
|
||||||
|
* pSourceFileReceivedKey parameter (A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY)
|
||||||
|
* into destination provided by pDestination parameter.
|
||||||
|
* pDestination parameter allowed values are: 'ODS', 'QUARANTINE'
|
||||||
|
* @example exec FILE_MANAGER.MOVE_FILE(pSourceFileReceivedKey => 377, pDestination => 'ODS');
|
||||||
|
**/
|
||||||
|
PROCEDURE MOVE_FILE(
|
||||||
|
pSourceFileReceivedKey IN NUMBER,
|
||||||
|
pDestination IN VARCHAR2
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name DELETE_FOLDER_CONTENTS
|
||||||
|
* @desc It deletes all files from specified folder in the cloud storage.
|
||||||
|
* The procedure lists all objects in the specified folder prefix and deletes them one by one.
|
||||||
|
* pBucketArea parameter specifies which bucket to use: 'INBOX', 'DATA', 'ARCHIVE'
|
||||||
|
* pFolderPrefix parameter specifies the folder path within the bucket (e.g., 'C2D/UC_DISSEM/UC_NMA_DISSEM/')
|
||||||
|
* @example exec FILE_MANAGER.DELETE_FOLDER_CONTENTS(pBucketArea => 'INBOX', pFolderPrefix => 'C2D/UC_DISSEM/UC_NMA_DISSEM/');
|
||||||
|
**/
|
||||||
|
PROCEDURE DELETE_FOLDER_CONTENTS(
|
||||||
|
pBucketArea IN VARCHAR2,
|
||||||
|
pFolderPrefix IN VARCHAR2
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name PROCESS_SOURCE_FILE
|
||||||
|
* @desc It process file provided by pSourceFileReceivedName parameter.
|
||||||
|
* Ubmrella procedure that calls:
|
||||||
|
* - REGISTER_SOURCE_FILE_RECEIVED;
|
||||||
|
* - CREATE_EXTERNAL_TABLE;
|
||||||
|
* - VALIDATE_SOURCE_FILE_RECEIVED;
|
||||||
|
* - DROP_EXTERNAL_TABLE;
|
||||||
|
* - MOVE_FILE;
|
||||||
|
* @example exec FILE_MANAGER.PROCESS_SOURCE_FILE(pSourceFileReceivedName => 'INBOX/C2D/UC_DISSEM/UC_NMA_DISSEM/UC_NMA_DISSEM-277740.csv');
|
||||||
|
**/
|
||||||
|
PROCEDURE PROCESS_SOURCE_FILE(pSourceFileReceivedName IN VARCHAR2)
|
||||||
|
;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name PROCESS_SOURCE_FILE
|
||||||
|
* @desc It process file provided by pSourceFileReceivedName parameter and return processing result value.
|
||||||
|
* It returns (success/failure) => 0 / -(value).
|
||||||
|
* Ubmrella function that calls PROCESS_SOURCE_FILE procedure.
|
||||||
|
* @example
|
||||||
|
* declare
|
||||||
|
* vResult PLS_INTEGER;
|
||||||
|
* begin
|
||||||
|
* vResult := CT_MRDS.FILE_MANAGER.PROCESS_SOURCE_FILE(PSOURCEFILERECEIVEDNAME => 'INBOX/C2D/UC_DISSEM/UC_NMA_DISSEM/UC_NMA_DISSEM-277740.csv');
|
||||||
|
* DBMS_OUTPUT.PUT_LINE('vResult = ' || vResult);
|
||||||
|
* end;
|
||||||
|
* @ex_rslt 0
|
||||||
|
* -20021
|
||||||
|
**/
|
||||||
|
FUNCTION PROCESS_SOURCE_FILE(pSourceFileReceivedName IN VARCHAR2)
|
||||||
|
RETURN PLS_INTEGER;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_DATE_FORMAT
|
||||||
|
* @desc Returns date format for specified template table name and column name.
|
||||||
|
* Date is taken from configuration A_COLUMN_DATE_FORMAT table.
|
||||||
|
* @example select FILE_MANAGER.GET_DATE_FORMAT(
|
||||||
|
* pTemplateTableName => 'STANDING_FACILITIES_HEADER',
|
||||||
|
* pColumnName => 'SNAPSHOT_DATE')
|
||||||
|
* from dual;
|
||||||
|
* @ex_rslt DD/MM/YYYY HH24:MI:SS
|
||||||
|
**/
|
||||||
|
FUNCTION GET_DATE_FORMAT(
|
||||||
|
pTemplateTableName IN VARCHAR2,
|
||||||
|
pColumnName IN VARCHAR2
|
||||||
|
) RETURN VARCHAR2;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GENERATE_EXTERNAL_TABLE_PARAMS
|
||||||
|
* @desc It builds two strings: pColumnList and pFieldList for specified Template Table name, by parameter: pTemplateTableName.
|
||||||
|
* @example
|
||||||
|
* declare
|
||||||
|
* vColumnList CLOB;
|
||||||
|
* vFieldList CLOB;
|
||||||
|
* begin
|
||||||
|
* FILE_MANAGER.GENERATE_EXTERNAL_TABLE_PARAMS (
|
||||||
|
* pTemplateTableName => 'CT_ET_TEMPLATES.LM_STANDING_FACILITIES_HEADER'
|
||||||
|
* ,pColumnList => vColumnList
|
||||||
|
* ,pFieldList => vFieldList
|
||||||
|
* );
|
||||||
|
* DBMS_OUTPUT.PUT_LINE('vColumnList = '||vColumnList);
|
||||||
|
* DBMS_OUTPUT.PUT_LINE('vFieldList = '||vFieldList);
|
||||||
|
* end;
|
||||||
|
* /
|
||||||
|
**/
|
||||||
|
PROCEDURE GENERATE_EXTERNAL_TABLE_PARAMS (
|
||||||
|
pTemplateTableName IN VARCHAR2,
|
||||||
|
pColumnList OUT CLOB,
|
||||||
|
pFieldList OUT CLOB
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name ADD_SOURCE
|
||||||
|
* @desc Insert a new record to A_SOURCE table.
|
||||||
|
* pSourceKey is a PRIMARY KEY value.
|
||||||
|
**/
|
||||||
|
PROCEDURE ADD_SOURCE (
|
||||||
|
pSourceKey IN CT_MRDS.A_SOURCE.A_SOURCE_KEY%TYPE,
|
||||||
|
pSourceName IN CT_MRDS.A_SOURCE.SOURCE_NAME%TYPE
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name DELETE_SOURCE_CASCADE
|
||||||
|
* @desc Safely deletes a SOURCE specified by pSourceKey parameter from A_SOURCE table and all dependent tables:
|
||||||
|
* - A_SOURCE_FILE_CONFIG
|
||||||
|
* - A_SOURCE_FILE_RECEIVED
|
||||||
|
* - A_COLUMN_DATE_FORMAT (only if template table is not shared with other source systems)
|
||||||
|
* The procedure checks if template tables are shared before deleting date format configurations.
|
||||||
|
* If a template table is used by multiple source systems, date formats are preserved.
|
||||||
|
* @example CALL CT_MRDS.FILE_MANAGER.DELETE_SOURCE_CASCADE(pSourceKey => 'TEST_SYS');
|
||||||
|
**/
|
||||||
|
PROCEDURE DELETE_SOURCE_CASCADE (
|
||||||
|
pSourceKey IN CT_MRDS.A_SOURCE.A_SOURCE_KEY%TYPE
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_CONTAINER_SOURCE_FILE_CONFIG_KEY
|
||||||
|
* @desc For specified parameter pSourceFileId (A_SOURCE_FILE_CONFIG.SOURCE_FILE_ID)
|
||||||
|
* it returns A_SOURCE_FILE_CONFIG.A_SOURCE_FILE_CONFIG_KEY for related CONTAINER record.
|
||||||
|
* @example select FILE_MANAGER.GET_CONTAINER_SOURCE_FILE_CONFIG_KEY(
|
||||||
|
* pSourceFileId => 'UC_DISSEM')
|
||||||
|
* from dual;
|
||||||
|
* @ex_rslt 126
|
||||||
|
**/
|
||||||
|
FUNCTION GET_CONTAINER_SOURCE_FILE_CONFIG_KEY (
|
||||||
|
pSourceFileId IN CT_MRDS.A_SOURCE_FILE_CONFIG.SOURCE_FILE_ID%TYPE
|
||||||
|
) RETURN PLS_INTEGER;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_SOURCE_FILE_CONFIG_KEY
|
||||||
|
* @desc For specified input parameters,
|
||||||
|
* it returns A_SOURCE_FILE_CONFIG.A_SOURCE_FILE_CONFIG_KEY.
|
||||||
|
* @example select FILE_MANAGER.GET_SOURCE_FILE_CONFIG_KEY (
|
||||||
|
* pSourceFileType => 'INPUT'
|
||||||
|
* ,pSourceFileId => 'UC_DISSEM'
|
||||||
|
* ,pTableId => 'UC_NMA_DISSEM')
|
||||||
|
* from dual;
|
||||||
|
* @ex_rslt 126
|
||||||
|
**/
|
||||||
|
FUNCTION GET_SOURCE_FILE_CONFIG_KEY (
|
||||||
|
pSourceFileType IN CT_MRDS.A_SOURCE_FILE_CONFIG.SOURCE_FILE_TYPE%TYPE DEFAULT 'INPUT'
|
||||||
|
,pSourceFileId IN CT_MRDS.A_SOURCE_FILE_CONFIG.SOURCE_FILE_ID%TYPE
|
||||||
|
,pTableId IN CT_MRDS.A_SOURCE_FILE_CONFIG.TABLE_ID%TYPE
|
||||||
|
) RETURN PLS_INTEGER;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name ADD_SOURCE_FILE_CONFIG
|
||||||
|
* @desc Insert a new record to A_SOURCE_FILE_CONFIG table.
|
||||||
|
* MARS-1049: Added pEncoding parameter for CSV character set specification.
|
||||||
|
* @param pEncoding - Character set encoding for CSV files (e.g., 'UTF8', 'WE8MSWIN1252', 'EE8ISO8859P2')
|
||||||
|
* If NULL, no CHARACTERSET clause is added to external table definitions
|
||||||
|
* @example CALL CT_MRDS.FILE_MANAGER.ADD_SOURCE_FILE_CONFIG(
|
||||||
|
* pSourceKey => 'C2D', pSourceFileType => 'INPUT',
|
||||||
|
* pSourceFileId => 'UC_DISSEM', pTableId => 'METADATA_LOADS',
|
||||||
|
* pTemplateTableName => 'CT_ET_TEMPLATES.C2D_A_UC_DISSEM_METADATA_LOADS',
|
||||||
|
* pEncoding => 'UTF8'
|
||||||
|
* );
|
||||||
|
**/
|
||||||
|
PROCEDURE ADD_SOURCE_FILE_CONFIG (
|
||||||
|
pSourceKey IN CT_MRDS.A_SOURCE_FILE_CONFIG.A_SOURCE_KEY%TYPE
|
||||||
|
,pSourceFileType IN CT_MRDS.A_SOURCE_FILE_CONFIG.SOURCE_FILE_TYPE%TYPE
|
||||||
|
,pSourceFileId IN CT_MRDS.A_SOURCE_FILE_CONFIG.SOURCE_FILE_ID%TYPE
|
||||||
|
,pSourceFileDesc IN CT_MRDS.A_SOURCE_FILE_CONFIG.SOURCE_FILE_DESC%TYPE
|
||||||
|
,pSourceFileNamePattern IN CT_MRDS.A_SOURCE_FILE_CONFIG.SOURCE_FILE_NAME_PATTERN%TYPE
|
||||||
|
,pTableId IN CT_MRDS.A_SOURCE_FILE_CONFIG.TABLE_ID%TYPE DEFAULT NULL
|
||||||
|
,pTemplateTableName IN CT_MRDS.A_SOURCE_FILE_CONFIG.TEMPLATE_TABLE_NAME%TYPE DEFAULT NULL
|
||||||
|
,pContainerFileKey IN CT_MRDS.A_SOURCE_FILE_CONFIG.CONTAINER_FILE_KEY%TYPE DEFAULT NULL
|
||||||
|
,pEncoding IN CT_MRDS.A_SOURCE_FILE_CONFIG.ENCODING%TYPE DEFAULT NULL -- MARS-1049: NOWY PARAMETR
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name ADD_COLUMN_DATE_FORMAT
|
||||||
|
* @desc Insert a new record to A_COLUMN_DATE_FORMAT table.
|
||||||
|
**/
|
||||||
|
PROCEDURE ADD_COLUMN_DATE_FORMAT (
|
||||||
|
pTemplateTableName IN CT_MRDS.A_COLUMN_DATE_FORMAT.TEMPLATE_TABLE_NAME%TYPE
|
||||||
|
,pColumnName IN CT_MRDS.A_COLUMN_DATE_FORMAT.COLUMN_NAME%TYPE
|
||||||
|
,pDateFormat IN CT_MRDS.A_COLUMN_DATE_FORMAT.DATE_FORMAT%TYPE
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_BUCKET_URI
|
||||||
|
* @desc Function used to get string with bucket http url.
|
||||||
|
* Possible input values for pBucketArea are: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
|
||||||
|
* @example select FILE_MANAGER.GET_BUCKET_URI(pBucketArea => 'ODS') from dual;
|
||||||
|
* @ex_rslt https://objectstorage.eu-frankfurt-1.oraclecloud.com/n/frcnomajoc7v/b/mrds_data_tst/o/
|
||||||
|
**/
|
||||||
|
FUNCTION GET_BUCKET_URI(pBucketArea VARCHAR2)
|
||||||
|
RETURN VARCHAR2;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_DET_SOURCE_FILE_CONFIG_INFO
|
||||||
|
* @desc Function returns details about A_SOURCE_FILE_CONFIG record
|
||||||
|
* for specified pSourceFileConfigKey (A_SOURCE_FILE_CONFIG.A_SOURCE_FILE_CONFIG_KEY).
|
||||||
|
* If pIncludeContainerInfo is <> 0 it returns additional info about related Container config record (A_SOURCE_FILE_CONFIG)
|
||||||
|
* If pIncludeColumnFormatInfo is <> 0 it returns additional info about related ColumnFormat config record (A_COLUMN_DATE_FORMAT)
|
||||||
|
* @example select FILE_MANAGER.GET_DET_SOURCE_FILE_CONFIG_INFO (
|
||||||
|
* pSourceFileConfigKey => 128
|
||||||
|
* ,pIncludeContainerInfo => 1
|
||||||
|
* ,pIncludeColumnFormatInfo => 1
|
||||||
|
* ) from dual;
|
||||||
|
* @ex_rslt
|
||||||
|
* Details about File Configuration:
|
||||||
|
* --------------------------------
|
||||||
|
* A_SOURCE_FILE_CONFIG_KEY = 128
|
||||||
|
* A_SOURCE_KEY = C2D
|
||||||
|
* ...
|
||||||
|
* --------------------------------
|
||||||
|
*
|
||||||
|
* Details about related Container Config:
|
||||||
|
* --------------------------------
|
||||||
|
* A_SOURCE_FILE_CONFIG_KEY = 126
|
||||||
|
* A_SOURCE_KEY = C2D
|
||||||
|
* ...
|
||||||
|
* --------------------------------
|
||||||
|
*
|
||||||
|
* Column Date Format config entries:
|
||||||
|
* --------------------------------
|
||||||
|
* TEMPLATE_TABLE_NAME = CT_ET_TEMPLATES.C2D_UC_MA_DISSEM
|
||||||
|
* ...
|
||||||
|
* --------------------------------
|
||||||
|
**/
|
||||||
|
FUNCTION GET_DET_SOURCE_FILE_CONFIG_INFO (
|
||||||
|
pSourceFileConfigKey IN CT_MRDS.A_SOURCE_FILE_CONFIG.A_SOURCE_FILE_CONFIG_KEY%TYPE
|
||||||
|
,pIncludeContainerInfo IN PLS_INTEGER DEFAULT 1
|
||||||
|
,pIncludeColumnFormatInfo IN PLS_INTEGER DEFAULT 1
|
||||||
|
) RETURN VARCHAR2;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_DET_SOURCE_FILE_RECEIVED_INFO
|
||||||
|
* @desc Function returns details about A_SOURCE_FILE_RECEIVED record
|
||||||
|
* for specified pSourceFileReceivedKey (A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY).
|
||||||
|
* If pIncludeConfigInfo is <> 0 it returns additional info about related Container config record (A_SOURCE_FILE_CONFIG)
|
||||||
|
* If pIncludeContainerInfo is <> 0 it returns additional info about related Container config record (A_SOURCE_FILE_CONFIG)
|
||||||
|
* If pIncludeColumnFormatInfo is <> 0 it returns additional info about related ColumnFormat config record (A_COLUMN_DATE_FORMAT)
|
||||||
|
* @example select FILE_MANAGER.GET_DET_SOURCE_FILE_RECEIVED_INFO (
|
||||||
|
* pSourceFileReceivedKey => 377
|
||||||
|
* ,pIncludeConfigInfo => 1
|
||||||
|
* ,pIncludeContainerInfo => 1
|
||||||
|
* ,pIncludeColumnFormatInfo => 1
|
||||||
|
* ) from dual;
|
||||||
|
*
|
||||||
|
**/
|
||||||
|
FUNCTION GET_DET_SOURCE_FILE_RECEIVED_INFO (
|
||||||
|
pSourceFileReceivedKey IN CT_MRDS.A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY%TYPE
|
||||||
|
,pIncludeConfigInfo IN PLS_INTEGER DEFAULT 1
|
||||||
|
,pIncludeContainerInfo IN PLS_INTEGER DEFAULT 1
|
||||||
|
,pIncludeColumnFormatInfo IN PLS_INTEGER DEFAULT 1
|
||||||
|
) RETURN VARCHAR2;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_DET_USER_LOAD_OPERATIONS
|
||||||
|
* @desc Function returns details from USER_LOAD_OPERATIONS table
|
||||||
|
* for specified pOperationId.
|
||||||
|
* @example select FILE_MANAGER.GET_DET_USER_LOAD_OPERATIONS (pOperationId => 3608) from dual;
|
||||||
|
* @ex_rslt
|
||||||
|
* Details about USER_LOAD_OPERATIONS where ID = 3608
|
||||||
|
* --------------------------------
|
||||||
|
* ID = 3608
|
||||||
|
* TYPE = VALIDATE
|
||||||
|
* SID = 31260
|
||||||
|
* SERIAL# = 52915
|
||||||
|
* START_TIME = 2025-05-20 10.08.24.436983 EUROPE/BELGRADE
|
||||||
|
* UPDATE_TIME = 2025-05-20 10.08.24.458643 EUROPE/BELGRADE
|
||||||
|
* STATUS = FAILED
|
||||||
|
* OWNER_NAME = CT_MRDS
|
||||||
|
* TABLE_NAME = STANDING_FACILITIES_HEADER
|
||||||
|
* PARTITION_NAME =
|
||||||
|
* SUBPARTITION_NAME =
|
||||||
|
* FILE_URI_LIST =
|
||||||
|
* ROWS_LOADED =
|
||||||
|
* LOGFILE_TABLE = VALIDATE$3608_LOG
|
||||||
|
* BADFILE_TABLE = VALIDATE$3608_BAD
|
||||||
|
* STATUS_TABLE =
|
||||||
|
* TEMPEXT_TABLE =
|
||||||
|
* CREDENTIAL_NAME =
|
||||||
|
* EXPIRATION_TIME = 2025-05-22 10.08.24.436983000 EUROPE/BELGRADE
|
||||||
|
* --------------------------------
|
||||||
|
**/
|
||||||
|
FUNCTION GET_DET_USER_LOAD_OPERATIONS (
|
||||||
|
pOperationId PLS_INTEGER
|
||||||
|
) RETURN VARCHAR2;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name ANALYZE_VALIDATION_ERRORS
|
||||||
|
* @desc Wrapper function that analyzes validation errors for a source file using its received key.
|
||||||
|
* Automatically derives template schema, table name, CSV URI and validation log table
|
||||||
|
* from file metadata and calls ENV_MANAGER.ANALYZE_VALIDATION_ERRORS.
|
||||||
|
* @example SELECT FILE_MANAGER.ANALYZE_VALIDATION_ERRORS(63) FROM DUAL;
|
||||||
|
* @ex_rslt Detailed validation analysis report with column mismatches and solutions
|
||||||
|
**/
|
||||||
|
FUNCTION ANALYZE_VALIDATION_ERRORS(
|
||||||
|
pSourceFileReceivedKey IN NUMBER
|
||||||
|
) RETURN VARCHAR2;
|
||||||
|
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
-- PACKAGE VERSION MANAGEMENT FUNCTIONS
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_VERSION
|
||||||
|
* @desc Returns the current version number of the FILE_MANAGER package.
|
||||||
|
* Uses semantic versioning format (MAJOR.MINOR.PATCH).
|
||||||
|
* @example SELECT FILE_MANAGER.GET_VERSION() FROM DUAL;
|
||||||
|
* @ex_rslt 3.2.0
|
||||||
|
**/
|
||||||
|
FUNCTION GET_VERSION RETURN VARCHAR2;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_BUILD_INFO
|
||||||
|
* @desc Returns comprehensive build information including version, build date, and author.
|
||||||
|
* Uses centralized ENV_MANAGER.GET_PACKAGE_VERSION_INFO function.
|
||||||
|
* @example SELECT FILE_MANAGER.GET_BUILD_INFO() FROM DUAL;
|
||||||
|
* @ex_rslt Package: FILE_MANAGER
|
||||||
|
* Version: 3.2.0
|
||||||
|
* Build Date: 2025-10-22 16:30:00
|
||||||
|
* Author: Grzegorz Michalski
|
||||||
|
**/
|
||||||
|
FUNCTION GET_BUILD_INFO RETURN VARCHAR2;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_VERSION_HISTORY
|
||||||
|
* @desc Returns complete version history with all releases and changes.
|
||||||
|
* Uses centralized ENV_MANAGER.FORMAT_VERSION_HISTORY function.
|
||||||
|
* @example SELECT FILE_MANAGER.GET_VERSION_HISTORY() FROM DUAL;
|
||||||
|
* @ex_rslt FILE_MANAGER Version History:
|
||||||
|
* 3.2.0 (2025-10-22): Added package versioning system...
|
||||||
|
**/
|
||||||
|
FUNCTION GET_VERSION_HISTORY RETURN VARCHAR2;
|
||||||
|
|
||||||
|
END;
|
||||||
|
|
||||||
|
/
|
||||||
|
|
||||||
|
/
|
||||||
@@ -181,7 +181,7 @@ AS
|
|||||||
BEGIN
|
BEGIN
|
||||||
UPDATE CT_MRDS.A_SOURCE_FILE_RECEIVED r
|
UPDATE CT_MRDS.A_SOURCE_FILE_RECEIVED r
|
||||||
SET PROCESSING_STATUS = 'ARCHIVED'
|
SET PROCESSING_STATUS = 'ARCHIVED'
|
||||||
,ARCH_FILE_NAME = vUri||vFilename
|
,ARCH_PATH = vUri||vFilename
|
||||||
WHERE r.a_source_file_config_key= pSourceFileConfigKey
|
WHERE r.a_source_file_config_key= pSourceFileConfigKey
|
||||||
AND r.source_file_name = f.filename
|
AND r.source_file_name = f.filename
|
||||||
AND r.processing_status = 'INGESTED'
|
AND r.processing_status = 'INGESTED'
|
||||||
@@ -244,7 +244,7 @@ AS
|
|||||||
|
|
||||||
UPDATE CT_MRDS.A_SOURCE_FILE_RECEIVED r
|
UPDATE CT_MRDS.A_SOURCE_FILE_RECEIVED r
|
||||||
SET PROCESSING_STATUS = 'INGESTED'
|
SET PROCESSING_STATUS = 'INGESTED'
|
||||||
,ARCH_FILE_NAME = NULL
|
,ARCH_PATH = NULL
|
||||||
WHERE r.a_source_file_config_key = pSourceFileConfigKey
|
WHERE r.a_source_file_config_key = pSourceFileConfigKey
|
||||||
AND r.source_file_name = f.filename
|
AND r.source_file_name = f.filename
|
||||||
;
|
;
|
||||||
|
|||||||
@@ -231,7 +231,7 @@ AS
|
|||||||
BEGIN
|
BEGIN
|
||||||
UPDATE CT_MRDS.A_SOURCE_FILE_RECEIVED r
|
UPDATE CT_MRDS.A_SOURCE_FILE_RECEIVED r
|
||||||
SET PROCESSING_STATUS = 'ARCHIVED'
|
SET PROCESSING_STATUS = 'ARCHIVED'
|
||||||
,ARCH_FILE_NAME = vUri||vFilename
|
,ARCH_PATH = vUri||vFilename
|
||||||
WHERE r.a_source_file_config_key= pSourceFileConfigKey
|
WHERE r.a_source_file_config_key= pSourceFileConfigKey
|
||||||
AND r.source_file_name = f.filename
|
AND r.source_file_name = f.filename
|
||||||
AND r.processing_status = 'INGESTED'
|
AND r.processing_status = 'INGESTED'
|
||||||
@@ -294,7 +294,7 @@ AS
|
|||||||
|
|
||||||
UPDATE CT_MRDS.A_SOURCE_FILE_RECEIVED r
|
UPDATE CT_MRDS.A_SOURCE_FILE_RECEIVED r
|
||||||
SET PROCESSING_STATUS = 'INGESTED'
|
SET PROCESSING_STATUS = 'INGESTED'
|
||||||
,ARCH_FILE_NAME = NULL
|
,ARCH_PATH = NULL
|
||||||
WHERE r.a_source_file_config_key = pSourceFileConfigKey
|
WHERE r.a_source_file_config_key = pSourceFileConfigKey
|
||||||
AND r.source_file_name = f.filename
|
AND r.source_file_name = f.filename
|
||||||
;
|
;
|
||||||
|
|||||||
@@ -223,7 +223,7 @@ AS
|
|||||||
|
|
||||||
-- Note: DBMS_CLOUD.EXPORT_DATA may create multiple parquet files (parallel execution)
|
-- Note: DBMS_CLOUD.EXPORT_DATA may create multiple parquet files (parallel execution)
|
||||||
-- Instead of tracking individual files, we store the archive directory prefix
|
-- Instead of tracking individual files, we store the archive directory prefix
|
||||||
-- ARCH_FILE_NAME will contain the directory URI where all parquet files are located
|
-- ARCH_PATH will contain the directory URI where all parquet files are located
|
||||||
vFilename := vUri; -- Store directory prefix instead of individual filename
|
vFilename := vUri; -- Store directory prefix instead of individual filename
|
||||||
|
|
||||||
-- Try to drop EXPORTED FILES ("regular data files")
|
-- Try to drop EXPORTED FILES ("regular data files")
|
||||||
@@ -234,7 +234,7 @@ AS
|
|||||||
BEGIN
|
BEGIN
|
||||||
UPDATE CT_MRDS.A_SOURCE_FILE_RECEIVED r
|
UPDATE CT_MRDS.A_SOURCE_FILE_RECEIVED r
|
||||||
SET PROCESSING_STATUS = 'ARCHIVED_AND_TRASHED' -- Status reflects file is archived and kept in TRASH
|
SET PROCESSING_STATUS = 'ARCHIVED_AND_TRASHED' -- Status reflects file is archived and kept in TRASH
|
||||||
,ARCH_FILE_NAME = vFilename -- Now contains directory prefix, not individual file
|
,ARCH_PATH = vFilename -- Now contains directory prefix, not individual file
|
||||||
,PARTITION_YEAR = ym_loop.year -- Record which partition year the data was archived to
|
,PARTITION_YEAR = ym_loop.year -- Record which partition year the data was archived to
|
||||||
,PARTITION_MONTH = ym_loop.month -- Record which partition month the data was archived to
|
,PARTITION_MONTH = ym_loop.month -- Record which partition month the data was archived to
|
||||||
WHERE r.a_source_file_config_key= pSourceFileConfigKey
|
WHERE r.a_source_file_config_key= pSourceFileConfigKey
|
||||||
@@ -313,7 +313,7 @@ AS
|
|||||||
|
|
||||||
UPDATE CT_MRDS.A_SOURCE_FILE_RECEIVED r
|
UPDATE CT_MRDS.A_SOURCE_FILE_RECEIVED r
|
||||||
SET PROCESSING_STATUS = 'INGESTED'
|
SET PROCESSING_STATUS = 'INGESTED'
|
||||||
,ARCH_FILE_NAME = NULL
|
,ARCH_PATH = NULL
|
||||||
WHERE r.a_source_file_config_key = pSourceFileConfigKey
|
WHERE r.a_source_file_config_key = pSourceFileConfigKey
|
||||||
AND r.source_file_name = f.filename
|
AND r.source_file_name = f.filename
|
||||||
;
|
;
|
||||||
@@ -568,7 +568,7 @@ AS
|
|||||||
|
|
||||||
UPDATE CT_MRDS.A_SOURCE_FILE_RECEIVED
|
UPDATE CT_MRDS.A_SOURCE_FILE_RECEIVED
|
||||||
SET PROCESSING_STATUS = 'INGESTED',
|
SET PROCESSING_STATUS = 'INGESTED',
|
||||||
ARCH_FILE_NAME = NULL,
|
ARCH_PATH = NULL,
|
||||||
PARTITION_YEAR = NULL,
|
PARTITION_YEAR = NULL,
|
||||||
PARTITION_MONTH = NULL
|
PARTITION_MONTH = NULL
|
||||||
WHERE A_SOURCE_FILE_RECEIVED_KEY = pSourceFileReceivedKey
|
WHERE A_SOURCE_FILE_RECEIVED_KEY = pSourceFileReceivedKey
|
||||||
@@ -609,7 +609,7 @@ AS
|
|||||||
|
|
||||||
UPDATE CT_MRDS.A_SOURCE_FILE_RECEIVED
|
UPDATE CT_MRDS.A_SOURCE_FILE_RECEIVED
|
||||||
SET PROCESSING_STATUS = 'INGESTED',
|
SET PROCESSING_STATUS = 'INGESTED',
|
||||||
ARCH_FILE_NAME = NULL,
|
ARCH_PATH = NULL,
|
||||||
PARTITION_YEAR = NULL,
|
PARTITION_YEAR = NULL,
|
||||||
PARTITION_MONTH = NULL
|
PARTITION_MONTH = NULL
|
||||||
WHERE A_SOURCE_FILE_CONFIG_KEY = pSourceFileConfigKey
|
WHERE A_SOURCE_FILE_CONFIG_KEY = pSourceFileConfigKey
|
||||||
@@ -649,7 +649,7 @@ AS
|
|||||||
|
|
||||||
UPDATE CT_MRDS.A_SOURCE_FILE_RECEIVED
|
UPDATE CT_MRDS.A_SOURCE_FILE_RECEIVED
|
||||||
SET PROCESSING_STATUS = 'INGESTED',
|
SET PROCESSING_STATUS = 'INGESTED',
|
||||||
ARCH_FILE_NAME = NULL,
|
ARCH_PATH = NULL,
|
||||||
PARTITION_YEAR = NULL,
|
PARTITION_YEAR = NULL,
|
||||||
PARTITION_MONTH = NULL
|
PARTITION_MONTH = NULL
|
||||||
WHERE PROCESSING_STATUS = 'ARCHIVED_AND_TRASHED';
|
WHERE PROCESSING_STATUS = 'ARCHIVED_AND_TRASHED';
|
||||||
|
|||||||
@@ -29,7 +29,8 @@ DECLARE
|
|||||||
-- Format: 'SCHEMA.PACKAGE_NAME'
|
-- Format: 'SCHEMA.PACKAGE_NAME'
|
||||||
-- ===================================================================
|
-- ===================================================================
|
||||||
vPackageList t_string_array := t_string_array(
|
vPackageList t_string_array := t_string_array(
|
||||||
'CT_MRDS.FILE_ARCHIVER'
|
'CT_MRDS.FILE_ARCHIVER',
|
||||||
|
'CT_MRDS.FILE_MANAGER'
|
||||||
);
|
);
|
||||||
-- ===================================================================
|
-- ===================================================================
|
||||||
|
|
||||||
|
|||||||
@@ -1,9 +1,10 @@
|
|||||||
-- ============================================================================
|
-- ============================================================================
|
||||||
-- MARS-835-PREHOOK Installation Script 02: DATA_EXPORTER Package
|
-- MARS-835-PREHOOK Installation Script 02: DATA_EXPORTER Package
|
||||||
-- ============================================================================
|
-- ============================================================================
|
||||||
-- Purpose: Deploy updated DATA_EXPORTER package (SPEC + BODY) with parallel processing
|
-- Purpose: Deploy updated DATA_EXPORTER package (SPEC + BODY) v2.8.1
|
||||||
-- Schema: CT_MRDS
|
-- Schema: CT_MRDS
|
||||||
-- Object: PACKAGE DATA_EXPORTER
|
-- Object: PACKAGE DATA_EXPORTER
|
||||||
|
|
||||||
-- ============================================================================
|
-- ============================================================================
|
||||||
|
|
||||||
SET SERVEROUTPUT ON SIZE UNLIMITED
|
SET SERVEROUTPUT ON SIZE UNLIMITED
|
||||||
@@ -13,8 +14,8 @@ PROMPT =========================================================================
|
|||||||
PROMPT MARS-835-PREHOOK: Installing CT_MRDS.DATA_EXPORTER Package
|
PROMPT MARS-835-PREHOOK: Installing CT_MRDS.DATA_EXPORTER Package
|
||||||
PROMPT ============================================================================
|
PROMPT ============================================================================
|
||||||
PROMPT Package: CT_MRDS.DATA_EXPORTER
|
PROMPT Package: CT_MRDS.DATA_EXPORTER
|
||||||
PROMPT Version: 2.2.0 -> 2.4.0 (MINOR)
|
PROMPT Version: 2.2.0 -> 2.8.1 (PATCH)
|
||||||
PROMPT Change: Added parallel processing + Smart Column Mapping for CSV exports
|
PROMPT Change: Fixed query in EXPORT_TABLE_DATA - removed A_LOAD_HISTORY join for single file
|
||||||
PROMPT ============================================================================
|
PROMPT ============================================================================
|
||||||
|
|
||||||
PROMPT
|
PROMPT
|
||||||
|
|||||||
@@ -0,0 +1,70 @@
|
|||||||
|
-- ====================================================================
|
||||||
|
-- MARS-835-PREHOOK: Update A_SOURCE_FILE_RECEIVED Table Structure
|
||||||
|
-- ====================================================================
|
||||||
|
-- Purpose:
|
||||||
|
-- 1. Rename column ARCH_FILE_NAME to ARCH_PATH
|
||||||
|
-- 2. Add new column PROCESS_NAME VARCHAR2(200)
|
||||||
|
-- Author: Grzegorz Michalski
|
||||||
|
-- Date: 2026-02-13
|
||||||
|
-- ====================================================================
|
||||||
|
|
||||||
|
SET SERVEROUTPUT ON SIZE UNLIMITED
|
||||||
|
SET TIMING ON
|
||||||
|
|
||||||
|
PROMPT ====================================================================
|
||||||
|
PROMPT MARS-835-PREHOOK: Updating A_SOURCE_FILE_RECEIVED table structure
|
||||||
|
PROMPT ====================================================================
|
||||||
|
|
||||||
|
-- Check if column ARCH_FILE_NAME exists
|
||||||
|
DECLARE
|
||||||
|
v_column_exists NUMBER;
|
||||||
|
v_process_name_exists NUMBER;
|
||||||
|
BEGIN
|
||||||
|
-- Check if ARCH_FILE_NAME exists
|
||||||
|
SELECT COUNT(*)
|
||||||
|
INTO v_column_exists
|
||||||
|
FROM dba_tab_columns
|
||||||
|
WHERE owner = 'CT_MRDS'
|
||||||
|
AND table_name = 'A_SOURCE_FILE_RECEIVED'
|
||||||
|
AND column_name = 'ARCH_FILE_NAME';
|
||||||
|
|
||||||
|
IF v_column_exists > 0 THEN
|
||||||
|
DBMS_OUTPUT.PUT_LINE('INFO: Renaming column ARCH_FILE_NAME to ARCH_PATH...');
|
||||||
|
EXECUTE IMMEDIATE 'ALTER TABLE CT_MRDS.A_SOURCE_FILE_RECEIVED RENAME COLUMN ARCH_FILE_NAME TO ARCH_PATH';
|
||||||
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: Column renamed to ARCH_PATH');
|
||||||
|
ELSE
|
||||||
|
DBMS_OUTPUT.PUT_LINE('INFO: Column ARCH_FILE_NAME does not exist (already renamed or first install)');
|
||||||
|
END IF;
|
||||||
|
|
||||||
|
-- Check if PROCESS_NAME already exists
|
||||||
|
SELECT COUNT(*)
|
||||||
|
INTO v_process_name_exists
|
||||||
|
FROM dba_tab_columns
|
||||||
|
WHERE owner = 'CT_MRDS'
|
||||||
|
AND table_name = 'A_SOURCE_FILE_RECEIVED'
|
||||||
|
AND column_name = 'PROCESS_NAME';
|
||||||
|
|
||||||
|
IF v_process_name_exists = 0 THEN
|
||||||
|
DBMS_OUTPUT.PUT_LINE('INFO: Adding new column PROCESS_NAME...');
|
||||||
|
EXECUTE IMMEDIATE 'ALTER TABLE CT_MRDS.A_SOURCE_FILE_RECEIVED ADD (PROCESS_NAME VARCHAR2(200))';
|
||||||
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: Column PROCESS_NAME added');
|
||||||
|
|
||||||
|
-- Add comment on new column
|
||||||
|
EXECUTE IMMEDIATE 'COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.PROCESS_NAME IS ''Name of the process that created this record''';
|
||||||
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: Comment added to PROCESS_NAME column');
|
||||||
|
ELSE
|
||||||
|
DBMS_OUTPUT.PUT_LINE('INFO: Column PROCESS_NAME already exists');
|
||||||
|
END IF;
|
||||||
|
|
||||||
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: A_SOURCE_FILE_RECEIVED table structure updated successfully');
|
||||||
|
|
||||||
|
EXCEPTION
|
||||||
|
WHEN OTHERS THEN
|
||||||
|
DBMS_OUTPUT.PUT_LINE('ERROR: Failed to update table structure: ' || SQLERRM);
|
||||||
|
RAISE;
|
||||||
|
END;
|
||||||
|
/
|
||||||
|
|
||||||
|
PROMPT ====================================================================
|
||||||
|
PROMPT A_SOURCE_FILE_RECEIVED Table Update Completed
|
||||||
|
PROMPT ====================================================================
|
||||||
@@ -0,0 +1,65 @@
|
|||||||
|
-- ====================================================================
|
||||||
|
-- MARS-835-PREHOOK ROLLBACK: Revert A_SOURCE_FILE_RECEIVED Table Structure
|
||||||
|
-- ====================================================================
|
||||||
|
-- Purpose:
|
||||||
|
-- 1. Rename column ARCH_PATH back to ARCH_FILE_NAME
|
||||||
|
-- 2. Remove column PROCESS_NAME
|
||||||
|
-- Author: Grzegorz Michalski
|
||||||
|
-- Date: 2026-02-13
|
||||||
|
-- ====================================================================
|
||||||
|
|
||||||
|
SET SERVEROUTPUT ON SIZE UNLIMITED
|
||||||
|
SET TIMING ON
|
||||||
|
|
||||||
|
PROMPT ====================================================================
|
||||||
|
PROMPT MARS-835-PREHOOK ROLLBACK: Reverting A_SOURCE_FILE_RECEIVED table
|
||||||
|
PROMPT ====================================================================
|
||||||
|
|
||||||
|
DECLARE
|
||||||
|
v_column_exists NUMBER;
|
||||||
|
v_process_name_exists NUMBER;
|
||||||
|
BEGIN
|
||||||
|
-- Check if ARCH_PATH exists (needs to be renamed back)
|
||||||
|
SELECT COUNT(*)
|
||||||
|
INTO v_column_exists
|
||||||
|
FROM dba_tab_columns
|
||||||
|
WHERE owner = 'CT_MRDS'
|
||||||
|
AND table_name = 'A_SOURCE_FILE_RECEIVED'
|
||||||
|
AND column_name = 'ARCH_PATH';
|
||||||
|
|
||||||
|
IF v_column_exists > 0 THEN
|
||||||
|
DBMS_OUTPUT.PUT_LINE('INFO: Renaming column ARCH_PATH back to ARCH_FILE_NAME...');
|
||||||
|
EXECUTE IMMEDIATE 'ALTER TABLE CT_MRDS.A_SOURCE_FILE_RECEIVED RENAME COLUMN ARCH_PATH TO ARCH_FILE_NAME';
|
||||||
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: Column renamed back to ARCH_FILE_NAME');
|
||||||
|
ELSE
|
||||||
|
DBMS_OUTPUT.PUT_LINE('INFO: Column ARCH_PATH does not exist (already rolled back)');
|
||||||
|
END IF;
|
||||||
|
|
||||||
|
-- Check if PROCESS_NAME exists (needs to be dropped)
|
||||||
|
SELECT COUNT(*)
|
||||||
|
INTO v_process_name_exists
|
||||||
|
FROM dba_tab_columns
|
||||||
|
WHERE owner = 'CT_MRDS'
|
||||||
|
AND table_name = 'A_SOURCE_FILE_RECEIVED'
|
||||||
|
AND column_name = 'PROCESS_NAME';
|
||||||
|
|
||||||
|
IF v_process_name_exists > 0 THEN
|
||||||
|
DBMS_OUTPUT.PUT_LINE('INFO: Dropping column PROCESS_NAME...');
|
||||||
|
EXECUTE IMMEDIATE 'ALTER TABLE CT_MRDS.A_SOURCE_FILE_RECEIVED DROP COLUMN PROCESS_NAME';
|
||||||
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: Column PROCESS_NAME dropped');
|
||||||
|
ELSE
|
||||||
|
DBMS_OUTPUT.PUT_LINE('INFO: Column PROCESS_NAME does not exist (already rolled back)');
|
||||||
|
END IF;
|
||||||
|
|
||||||
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: A_SOURCE_FILE_RECEIVED table structure rollback completed');
|
||||||
|
|
||||||
|
EXCEPTION
|
||||||
|
WHEN OTHERS THEN
|
||||||
|
DBMS_OUTPUT.PUT_LINE('ERROR: Failed to rollback table structure: ' || SQLERRM);
|
||||||
|
RAISE;
|
||||||
|
END;
|
||||||
|
/
|
||||||
|
|
||||||
|
PROMPT ====================================================================
|
||||||
|
PROMPT A_SOURCE_FILE_RECEIVED Table Rollback Completed
|
||||||
|
PROMPT ====================================================================
|
||||||
@@ -31,6 +31,7 @@ PROMPT =========================================================================
|
|||||||
PROMPT
|
PROMPT
|
||||||
PROMPT This script will:
|
PROMPT This script will:
|
||||||
PROMPT - Create A_PARALLEL_EXPORT_CHUNKS table with unique timestamp task names
|
PROMPT - Create A_PARALLEL_EXPORT_CHUNKS table with unique timestamp task names
|
||||||
|
PROMPT - Update A_SOURCE_FILE_RECEIVED table (rename ARCH_FILE_NAME to ARCH_PATH, add PROCESS_NAME column)
|
||||||
PROMPT - Update ENV_MANAGER to v3.2.0 (add parallel execution error codes)
|
PROMPT - Update ENV_MANAGER to v3.2.0 (add parallel execution error codes)
|
||||||
PROMPT - Update DATA_EXPORTER to v2.4.0 (DBMS_PARALLEL_EXECUTE + Smart Column Mapping)
|
PROMPT - Update DATA_EXPORTER to v2.4.0 (DBMS_PARALLEL_EXECUTE + Smart Column Mapping)
|
||||||
PROMPT - Add pParallelDegree parameter (1-16 threads) to EXPORT_*_BY_DATE procedures
|
PROMPT - Add pParallelDegree parameter (1-16 threads) to EXPORT_*_BY_DATE procedures
|
||||||
@@ -59,25 +60,31 @@ PROMPT =========================================================================
|
|||||||
|
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT =========================================================================
|
PROMPT =========================================================================
|
||||||
PROMPT Step 2: Deploy ENV_MANAGER Package
|
PROMPT Step 2: Update A_SOURCE_FILE_RECEIVED Table Structure
|
||||||
|
PROMPT =========================================================================
|
||||||
|
@@03_MARS_835_PREHOOK_update_SOURCE_FILE_RECEIVED_table.sql
|
||||||
|
|
||||||
|
PROMPT
|
||||||
|
PROMPT =========================================================================
|
||||||
|
PROMPT Step 3: Deploy ENV_MANAGER Package
|
||||||
PROMPT =========================================================================
|
PROMPT =========================================================================
|
||||||
@@01_MARS_835_PREHOOK_install_ENV_MANAGER.sql
|
@@01_MARS_835_PREHOOK_install_ENV_MANAGER.sql
|
||||||
|
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT =========================================================================
|
PROMPT =========================================================================
|
||||||
PROMPT Step 3: Deploy DATA_EXPORTER Package
|
PROMPT Step 4: Deploy DATA_EXPORTER Package
|
||||||
PROMPT =========================================================================
|
PROMPT =========================================================================
|
||||||
@@02_MARS_835_PREHOOK_install_DATA_EXPORTER.sql
|
@@02_MARS_835_PREHOOK_install_DATA_EXPORTER.sql
|
||||||
|
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT =========================================================================
|
PROMPT =========================================================================
|
||||||
PROMPT Step 4: Track Package Versions
|
PROMPT Step 5: Track Package Versions
|
||||||
PROMPT =========================================================================
|
PROMPT =========================================================================
|
||||||
@@track_package_versions.sql
|
@@track_package_versions.sql
|
||||||
|
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT =========================================================================
|
PROMPT =========================================================================
|
||||||
PROMPT Step 5: Verify Package Versions
|
PROMPT Step 6: Verify Package Versions
|
||||||
PROMPT =========================================================================
|
PROMPT =========================================================================
|
||||||
@@verify_packages_version.sql
|
@@verify_packages_version.sql
|
||||||
|
|
||||||
|
|||||||
@@ -26,7 +26,7 @@ END;
|
|||||||
/
|
/
|
||||||
|
|
||||||
CREATE TABLE CT_MRDS.A_PARALLEL_EXPORT_CHUNKS (
|
CREATE TABLE CT_MRDS.A_PARALLEL_EXPORT_CHUNKS (
|
||||||
CHUNK_ID NUMBER PRIMARY KEY,
|
CHUNK_ID NUMBER NOT NULL,
|
||||||
TASK_NAME VARCHAR2(100) NOT NULL,
|
TASK_NAME VARCHAR2(100) NOT NULL,
|
||||||
YEAR_VALUE VARCHAR2(4) NOT NULL,
|
YEAR_VALUE VARCHAR2(4) NOT NULL,
|
||||||
MONTH_VALUE VARCHAR2(2) NOT NULL,
|
MONTH_VALUE VARCHAR2(2) NOT NULL,
|
||||||
@@ -43,17 +43,20 @@ CREATE TABLE CT_MRDS.A_PARALLEL_EXPORT_CHUNKS (
|
|||||||
FILE_BASE_NAME VARCHAR2(1000),
|
FILE_BASE_NAME VARCHAR2(1000),
|
||||||
TEMPLATE_TABLE_NAME VARCHAR2(200),
|
TEMPLATE_TABLE_NAME VARCHAR2(200),
|
||||||
MAX_FILE_SIZE NUMBER DEFAULT 104857600 NOT NULL,
|
MAX_FILE_SIZE NUMBER DEFAULT 104857600 NOT NULL,
|
||||||
|
JOB_CLASS VARCHAR2(128),
|
||||||
STATUS VARCHAR2(30) DEFAULT 'PENDING' NOT NULL,
|
STATUS VARCHAR2(30) DEFAULT 'PENDING' NOT NULL,
|
||||||
ERROR_MESSAGE VARCHAR2(4000),
|
ERROR_MESSAGE VARCHAR2(4000),
|
||||||
EXPORT_TIMESTAMP TIMESTAMP,
|
EXPORT_TIMESTAMP TIMESTAMP,
|
||||||
CREATED_DATE TIMESTAMP DEFAULT SYSTIMESTAMP NOT NULL
|
CREATED_DATE TIMESTAMP DEFAULT SYSTIMESTAMP NOT NULL,
|
||||||
|
CONSTRAINT PK_PARALLEL_EXPORT_CHUNKS PRIMARY KEY (TASK_NAME, CHUNK_ID)
|
||||||
);
|
);
|
||||||
|
|
||||||
CREATE INDEX IX_PARALLEL_CHUNKS_TASK ON CT_MRDS.A_PARALLEL_EXPORT_CHUNKS(TASK_NAME);
|
-- Index for status-based queries (e.g., WHERE STATUS = 'FAILED' AND TASK_NAME = ?)
|
||||||
|
CREATE INDEX IX_PARALLEL_CHUNKS_STATUS_TASK ON CT_MRDS.A_PARALLEL_EXPORT_CHUNKS(STATUS, TASK_NAME);
|
||||||
|
|
||||||
COMMENT ON TABLE CT_MRDS.A_PARALLEL_EXPORT_CHUNKS IS 'Permanent table for parallel export chunk processing (DBMS_PARALLEL_EXECUTE) - permanent because GTT data not visible in parallel callback sessions';
|
COMMENT ON TABLE CT_MRDS.A_PARALLEL_EXPORT_CHUNKS IS 'Permanent table for parallel export chunk processing (DBMS_PARALLEL_EXECUTE) - permanent because GTT data not visible in parallel callback sessions. PK: (TASK_NAME, CHUNK_ID) ensures session isolation for concurrent exports.';
|
||||||
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.CHUNK_ID IS 'Unique chunk identifier (partition number)';
|
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.CHUNK_ID IS 'Chunk identifier within task (partition number) - unique per TASK_NAME, not globally';
|
||||||
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.TASK_NAME IS 'DBMS_PARALLEL_EXECUTE task name for cleanup';
|
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.TASK_NAME IS 'DBMS_PARALLEL_EXECUTE task name - session isolation key, part of composite PK with CHUNK_ID';
|
||||||
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.YEAR_VALUE IS 'Partition year (YYYY)';
|
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.YEAR_VALUE IS 'Partition year (YYYY)';
|
||||||
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.MONTH_VALUE IS 'Partition month (MM)';
|
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.MONTH_VALUE IS 'Partition month (MM)';
|
||||||
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.SCHEMA_NAME IS 'Schema owning the source table';
|
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.SCHEMA_NAME IS 'Schema owning the source table';
|
||||||
@@ -69,6 +72,7 @@ COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.FORMAT_TYPE IS 'Export format
|
|||||||
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.FILE_BASE_NAME IS 'Base filename for CSV exports (NULL for Parquet)';
|
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.FILE_BASE_NAME IS 'Base filename for CSV exports (NULL for Parquet)';
|
||||||
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.TEMPLATE_TABLE_NAME IS 'Template table name for per-column date format configuration (e.g., CT_ET_TEMPLATES.TABLE_NAME)';
|
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.TEMPLATE_TABLE_NAME IS 'Template table name for per-column date format configuration (e.g., CT_ET_TEMPLATES.TABLE_NAME)';
|
||||||
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.MAX_FILE_SIZE IS 'Maximum file size in bytes for CSV exports only (e.g., 104857600 = 100MB, 1073741824 = 1GB) - default 100MB (104857600). NOTE: Not applicable for PARQUET format (Oracle limitation)';
|
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.MAX_FILE_SIZE IS 'Maximum file size in bytes for CSV exports only (e.g., 104857600 = 100MB, 1073741824 = 1GB) - default 100MB (104857600). NOTE: Not applicable for PARQUET format (Oracle limitation)';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.JOB_CLASS IS 'Oracle Scheduler job class name for resource management (e.g., ''high'', ''DEFAULT_JOB_CLASS'') - NULL uses default scheduler priority';
|
||||||
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.STATUS IS 'Chunk processing status: PENDING (not started), PROCESSING (in progress), COMPLETED (success), FAILED (error) - allows retry of failed partitions only';
|
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.STATUS IS 'Chunk processing status: PENDING (not started), PROCESSING (in progress), COMPLETED (success), FAILED (error) - allows retry of failed partitions only';
|
||||||
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.ERROR_MESSAGE IS 'Error message if chunk processing failed (STATUS = FAILED)';
|
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.ERROR_MESSAGE IS 'Error message if chunk processing failed (STATUS = FAILED)';
|
||||||
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.EXPORT_TIMESTAMP IS 'Timestamp when chunk export was completed (STATUS = COMPLETED)';
|
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.EXPORT_TIMESTAMP IS 'Timestamp when chunk export was completed (STATUS = COMPLETED)';
|
||||||
|
|||||||
@@ -0,0 +1,30 @@
|
|||||||
|
-- ====================================================================
|
||||||
|
-- A_SOURCE_FILE_RECEIVED Table
|
||||||
|
-- ====================================================================
|
||||||
|
-- Purpose: Track received files and their processing status
|
||||||
|
-- ====================================================================
|
||||||
|
|
||||||
|
CREATE TABLE CT_MRDS.A_SOURCE_FILE_RECEIVED (
|
||||||
|
A_SOURCE_FILE_RECEIVED_KEY NUMBER(38,0) NOT NULL ENABLE,
|
||||||
|
A_SOURCE_FILE_CONFIG_KEY NUMBER(38,0) NOT NULL ENABLE,
|
||||||
|
SOURCE_FILE_NAME VARCHAR2(1000) NOT NULL,
|
||||||
|
CHECKSUM VARCHAR2(128),
|
||||||
|
CREATED TIMESTAMP(6) WITH TIME ZONE,
|
||||||
|
BYTES NUMBER,
|
||||||
|
RECEPTION_DATE DATE NOT NULL,
|
||||||
|
PROCESSING_STATUS VARCHAR2(200),
|
||||||
|
EXTERNAL_TABLE_NAME VARCHAR2(200),
|
||||||
|
PARTITION_YEAR VARCHAR2(4),
|
||||||
|
PARTITION_MONTH VARCHAR2(2),
|
||||||
|
ARCH_PATH VARCHAR2(1000),
|
||||||
|
PROCESS_NAME VARCHAR2(200),
|
||||||
|
CONSTRAINT A_SOURCE_FILE_RECEIVED_PK PRIMARY KEY (A_SOURCE_FILE_RECEIVED_KEY),
|
||||||
|
CONSTRAINT ASFR_A_SOURCE_FILE_CONFIG_KEY_FK FOREIGN KEY(A_SOURCE_FILE_CONFIG_KEY) REFERENCES CT_MRDS.A_SOURCE_FILE_CONFIG(A_SOURCE_FILE_CONFIG_KEY),
|
||||||
|
CONSTRAINT A_SOURCE_FILE_RECEIVED_CHK CHECK (PROCESSING_STATUS IN ('RECEIVED', 'VALIDATED', 'READY_FOR_INGESTION', 'INGESTED', 'ARCHIVED'))
|
||||||
|
) TABLESPACE "DATA";
|
||||||
|
|
||||||
|
-- Unique index for file identification (workaround for TIMESTAMP WITH TIMEZONE constraint limitation)
|
||||||
|
CREATE UNIQUE INDEX CT_MRDS.A_SOURCE_FILE_RECEIVED_UK1
|
||||||
|
ON CT_MRDS.A_SOURCE_FILE_RECEIVED(CHECKSUM, CREATED, BYTES);
|
||||||
|
|
||||||
|
GRANT SELECT, INSERT, UPDATE, DELETE ON CT_MRDS.A_SOURCE_FILE_RECEIVED TO MRDS_LOADER_ROLE;
|
||||||
File diff suppressed because it is too large
Load Diff
@@ -9,23 +9,17 @@ AS
|
|||||||
**/
|
**/
|
||||||
|
|
||||||
-- Package Version Information
|
-- Package Version Information
|
||||||
PACKAGE_VERSION CONSTANT VARCHAR2(10) := '2.6.3';
|
PACKAGE_VERSION CONSTANT VARCHAR2(10) := '2.14.0';
|
||||||
PACKAGE_BUILD_DATE CONSTANT VARCHAR2(19) := '2026-01-28 19:30:00';
|
PACKAGE_BUILD_DATE CONSTANT VARCHAR2(20) := '2026-02-25 09:00:00';
|
||||||
PACKAGE_AUTHOR CONSTANT VARCHAR2(50) := 'MRDS Development Team';
|
PACKAGE_AUTHOR CONSTANT VARCHAR2(100) := 'Grzegorz Michalski';
|
||||||
|
|
||||||
-- Version History (last 3-5 changes)
|
-- Version History (last 3-5 changes)
|
||||||
VERSION_HISTORY CONSTANT VARCHAR2(4000) :=
|
VERSION_HISTORY CONSTANT VARCHAR2(4000) :=
|
||||||
'v2.6.3 (2026-01-28): COMPILATION FIX - Resolved ORA-00904 error in EXPORT_PARTITION_PARALLEL. SQLERRM and DBMS_UTILITY.FORMAT_ERROR_BACKTRACE cannot be used directly in SQL UPDATE statements. Now properly assigned to vgMsgTmp variable before UPDATE.' || CHR(10) ||
|
'v2.14.0 (2026-02-25): OPTIMIZATION - Added pTaskName parameter to EXPORT_PARTITION_PARALLEL for deterministic filtering. Replaced FETCH FIRST 1 ROW ONLY safeguard with precise WHERE CHUNK_ID AND TASK_NAME filter. Eliminates ORDER BY overhead and provides cleaner session isolation.' || CHR(10) ||
|
||||||
'v2.6.2 (2026-01-28): CRITICAL FIX - Race condition when multiple exports run simultaneously. Changed DELETE to filter by age (>24h) instead of deleting all COMPLETED chunks. Prevents concurrent sessions from deleting each other chunks. Session-safe cleanup with TASK_NAME filtering. Enables true parallel execution of multiple export jobs.' || CHR(10) ||
|
'v2.13.1 (2026-02-25): CRITICAL FIX - Added START_ID and END_ID aliasses in CREATE_CHUNKS_BY_SQL to avoid ORA-00960 ambiguous column naming error.' || CHR(10) ||
|
||||||
'v2.6.1 (2026-01-28): Added DELETE_FAILED_EXPORT_FILE procedure to clean up partial/corrupted files before retry. When partition fails mid-export, partial file is deleted before retry to prevent Oracle from creating _1 suffixed duplicates. Ensures clean retry without orphaned files in OCI bucket.' || CHR(10) ||
|
'v2.13.0 (2026-02-25): CRITICAL SESSION ISOLATION FIX - Changed CREATE_CHUNKS_BY_NUMBER_COL to CREATE_CHUNKS_BY_SQL with TASK_NAME filter (fixes ORA-01422 in concurrent sessions). Added ORDER BY CREATED_DATE DESC FETCH FIRST 1 ROW safeguard to EXPORT_PARTITION_PARALLEL SELECT. Composite PK (TASK_NAME, CHUNK_ID) now fully functional.' || CHR(10) ||
|
||||||
'v2.6.0 (2026-01-28): CRITICAL FIX - Added STATUS tracking to A_PARALLEL_EXPORT_CHUNKS table to prevent data duplication on retry. System now restarts ONLY failed partitions instead of re-exporting all data. Added ERROR_MESSAGE and EXPORT_TIMESTAMP columns for better error handling and monitoring. Prevents duplicate file creation when parallel tasks fail (e.g., 22 partitions with 16 threads, 3 failures no longer duplicates 19 successful exports).' || CHR(10) ||
|
'v2.12.0 (2026-02-24): CRITICAL FIX - Rewritten DELETE_FAILED_EXPORT_FILE to use file-specific pattern matching (prevents deleting parallel CSV chunks in shared folder). Added vQuery logging before DBMS_CLOUD calls. Added CSV maxfilesize logging.' || CHR(10) ||
|
||||||
'v2.5.0 (2026-01-26): Added recorddelimiter parameter with CRLF (CHR(13)||CHR(10)) for CSV exports to ensure Windows-compatible line endings. Improves cross-platform compatibility when CSV files are opened in Windows applications (Notepad, Excel).' || CHR(10) ||
|
'v2.11.0 (2026-02-18): Added pJobClass parameter to EXPORT_TABLE_DATA_BY_DATE and EXPORT_TABLE_DATA_TO_CSV_BY_DATE for Oracle Scheduler job class support (resource/priority management).' || CHR(10);
|
||||||
'v2.4.0 (2026-01-11): Added pTemplateTableName parameter for per-column date format configuration. Implements dynamic query building with TO_CHAR for each date/timestamp column using FILE_MANAGER.GET_DATE_FORMAT. Supports 3-tier hierarchy: column-specific, template DEFAULT, global fallback. Eliminates single dateformat limitation of DBMS_CLOUD.EXPORT_DATA.' || CHR(10) ||
|
|
||||||
'v2.3.0 (2025-12-20): Added parallel partition processing using DBMS_PARALLEL_EXECUTE. New pParallelDegree parameter (1-16, default 1) for EXPORT_TABLE_DATA_BY_DATE and EXPORT_TABLE_DATA_TO_CSV_BY_DATE procedures. Each year/month partition processed in separate thread for improved performance.' || CHR(10) ||
|
|
||||||
'v2.2.0 (2025-12-19): DRY refactoring - extracted shared helper functions (sanitizeFilename, VALIDATE_TABLE_AND_COLUMNS, GET_PARTITIONS, EXPORT_SINGLE_PARTITION worker procedure). Reduced code duplication by ~400 lines. Prepared architecture for v2.3.0 parallel processing.' || CHR(10) ||
|
|
||||||
'v2.1.1 (2025-12-04): Fixed JOIN column reference A_WORKFLOW_HISTORY_KEY -> A_ETL_LOAD_SET_KEY, added consistent column mapping and dynamic column list to EXPORT_TABLE_DATA procedure, enhanced DEBUG logging for all export operations' || CHR(10) ||
|
|
||||||
'v2.1.0 (2025-10-22): Added version tracking and PARTITION_YEAR/PARTITION_MONTH support' || CHR(10) ||
|
|
||||||
'v2.0.0 (2025-10-01): Separated export functionality from FILE_MANAGER package' || CHR(10);
|
|
||||||
|
|
||||||
cgBL CONSTANT VARCHAR2(2) := CHR(13)||CHR(10);
|
cgBL CONSTANT VARCHAR2(2) := CHR(13)||CHR(10);
|
||||||
vgMsgTmp VARCHAR2(32000);
|
vgMsgTmp VARCHAR2(32000);
|
||||||
@@ -60,10 +54,12 @@ AS
|
|||||||
* but should NOT be called directly by external code.
|
* but should NOT be called directly by external code.
|
||||||
* @param pStartId - Chunk start ID (CHUNK_ID from A_PARALLEL_EXPORT_CHUNKS table)
|
* @param pStartId - Chunk start ID (CHUNK_ID from A_PARALLEL_EXPORT_CHUNKS table)
|
||||||
* @param pEndId - Chunk end ID (same as pStartId for single-row chunks)
|
* @param pEndId - Chunk end ID (same as pStartId for single-row chunks)
|
||||||
|
* @param pTaskName - Task name for session isolation (optional, DEFAULT NULL for backward compatibility)
|
||||||
**/
|
**/
|
||||||
PROCEDURE EXPORT_PARTITION_PARALLEL (
|
PROCEDURE EXPORT_PARTITION_PARALLEL (
|
||||||
pStartId IN NUMBER,
|
pStartId IN NUMBER,
|
||||||
pEndId IN NUMBER
|
pEndId IN NUMBER,
|
||||||
|
pTaskName IN VARCHAR2 DEFAULT NULL
|
||||||
);
|
);
|
||||||
|
|
||||||
---------------------------------------------------------------------------------------------------------------------------
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
@@ -73,8 +69,19 @@ AS
|
|||||||
/**
|
/**
|
||||||
* @name EXPORT_TABLE_DATA
|
* @name EXPORT_TABLE_DATA
|
||||||
* @desc Wrapper procedure for DBMS_CLOUD.EXPORT_DATA.
|
* @desc Wrapper procedure for DBMS_CLOUD.EXPORT_DATA.
|
||||||
* Exports data into CSV file on OCI infrustructure.
|
* Exports data into single CSV file on OCI infrastructure.
|
||||||
* pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
|
* pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
|
||||||
|
* Supports template table for column order and per-column date formatting.
|
||||||
|
* When pRegisterExport=TRUE, successfully exported file is registered in:
|
||||||
|
* - CT_MRDS.A_SOURCE_FILE_RECEIVED (tracks file location, size, checksum, and metadata)
|
||||||
|
* @param pFileName - Optional filename (e.g., 'export.csv'). NULL = auto-generate from table name
|
||||||
|
* @param pTemplateTableName - Optional template table (SCHEMA.TABLE or TABLE) for:
|
||||||
|
* - Column order control (template defines CSV structure)
|
||||||
|
* - Per-column date formatting via FILE_MANAGER.GET_DATE_FORMAT
|
||||||
|
* - NULL = use source table columns in natural order
|
||||||
|
* @param pMaxFileSize - Maximum file size in bytes (default 104857600 = 100MB, min 10MB, max 1GB)
|
||||||
|
* @param pRegisterExport - When TRUE, registers exported CSV file in A_SOURCE_FILE_RECEIVED table
|
||||||
|
* @param pProcessName - Process name stored in PROCESS_NAME column (default 'DATA_EXPORTER')
|
||||||
* @example
|
* @example
|
||||||
* begin
|
* begin
|
||||||
* DATA_EXPORTER.EXPORT_TABLE_DATA(
|
* DATA_EXPORTER.EXPORT_TABLE_DATA(
|
||||||
@@ -82,7 +89,11 @@ AS
|
|||||||
* pTableName => 'MY_TABLE',
|
* pTableName => 'MY_TABLE',
|
||||||
* pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
* pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
||||||
* pBucketArea => 'DATA',
|
* pBucketArea => 'DATA',
|
||||||
* pFolderName => 'csv_exports'
|
* pFolderName => 'csv_exports',
|
||||||
|
* pFileName => 'my_export.csv', -- Optional
|
||||||
|
* pTemplateTableName => 'CT_ET_TEMPLATES.MY_TEMPLATE', -- Optional
|
||||||
|
* pMaxFileSize => 104857600, -- Optional, default 100MB
|
||||||
|
* pRegisterExport => TRUE -- Optional, default FALSE
|
||||||
* );
|
* );
|
||||||
* end;
|
* end;
|
||||||
**/
|
**/
|
||||||
@@ -92,6 +103,11 @@ AS
|
|||||||
pKeyColumnName IN VARCHAR2,
|
pKeyColumnName IN VARCHAR2,
|
||||||
pBucketArea IN VARCHAR2,
|
pBucketArea IN VARCHAR2,
|
||||||
pFolderName IN VARCHAR2,
|
pFolderName IN VARCHAR2,
|
||||||
|
pFileName IN VARCHAR2 default NULL,
|
||||||
|
pTemplateTableName IN VARCHAR2 default NULL,
|
||||||
|
pMaxFileSize IN NUMBER default 104857600,
|
||||||
|
pRegisterExport IN BOOLEAN default FALSE,
|
||||||
|
pProcessName IN VARCHAR2 default 'DATA_EXPORTER',
|
||||||
pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
|
pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
|
||||||
);
|
);
|
||||||
|
|
||||||
@@ -133,6 +149,7 @@ AS
|
|||||||
pMaxDate IN DATE default SYSDATE,
|
pMaxDate IN DATE default SYSDATE,
|
||||||
pParallelDegree IN NUMBER default 1,
|
pParallelDegree IN NUMBER default 1,
|
||||||
pTemplateTableName IN VARCHAR2 default NULL,
|
pTemplateTableName IN VARCHAR2 default NULL,
|
||||||
|
pJobClass IN VARCHAR2 default NULL,
|
||||||
pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
|
pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
|
||||||
);
|
);
|
||||||
|
|
||||||
@@ -146,6 +163,9 @@ AS
|
|||||||
* but exports to CSV format instead of Parquet.
|
* but exports to CSV format instead of Parquet.
|
||||||
* Supports parallel partition processing via pParallelDegree parameter (1-16).
|
* Supports parallel partition processing via pParallelDegree parameter (1-16).
|
||||||
* File naming pattern: {pFileName}_YYYYMM.csv or {TABLENAME}_YYYYMM.csv (if pFileName is NULL)
|
* File naming pattern: {pFileName}_YYYYMM.csv or {TABLENAME}_YYYYMM.csv (if pFileName is NULL)
|
||||||
|
* When pRegisterExport=TRUE, successfully exported files are registered in:
|
||||||
|
* - CT_MRDS.A_SOURCE_FILE_RECEIVED (tracks file location, size, checksum, and metadata)
|
||||||
|
* @param pProcessName - Process name stored in PROCESS_NAME column (default 'DATA_EXPORTER')
|
||||||
* @example
|
* @example
|
||||||
* begin
|
* begin
|
||||||
* -- With custom filename
|
* -- With custom filename
|
||||||
@@ -158,7 +178,8 @@ AS
|
|||||||
* pFileName => 'my_export.csv',
|
* pFileName => 'my_export.csv',
|
||||||
* pMinDate => DATE '2024-01-01',
|
* pMinDate => DATE '2024-01-01',
|
||||||
* pMaxDate => SYSDATE,
|
* pMaxDate => SYSDATE,
|
||||||
* pParallelDegree => 8 -- Optional, default 1, range 1-16
|
* pParallelDegree => 8, -- Optional, default 1, range 1-16
|
||||||
|
* pRegisterExport => TRUE -- Optional, default FALSE, registers to A_SOURCE_FILE_RECEIVED
|
||||||
* );
|
* );
|
||||||
*
|
*
|
||||||
* -- With auto-generated filename (based on table name only)
|
* -- With auto-generated filename (based on table name only)
|
||||||
@@ -169,7 +190,8 @@ AS
|
|||||||
* pBucketArea => 'ARCHIVE',
|
* pBucketArea => 'ARCHIVE',
|
||||||
* pFolderName => 'exports',
|
* pFolderName => 'exports',
|
||||||
* pMinDate => DATE '2025-09-01',
|
* pMinDate => DATE '2025-09-01',
|
||||||
* pMaxDate => DATE '2025-09-17'
|
* pMaxDate => DATE '2025-09-17',
|
||||||
|
* pRegisterExport => TRUE -- Registers each export to A_SOURCE_FILE_RECEIVED table
|
||||||
* );
|
* );
|
||||||
* -- This will create files like: AGGREGATED_ALLOTMENT_202509.csv, etc.
|
* -- This will create files like: AGGREGATED_ALLOTMENT_202509.csv, etc.
|
||||||
* pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
|
* pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
|
||||||
@@ -188,6 +210,9 @@ AS
|
|||||||
pParallelDegree IN NUMBER default 1,
|
pParallelDegree IN NUMBER default 1,
|
||||||
pTemplateTableName IN VARCHAR2 default NULL,
|
pTemplateTableName IN VARCHAR2 default NULL,
|
||||||
pMaxFileSize IN NUMBER default 104857600,
|
pMaxFileSize IN NUMBER default 104857600,
|
||||||
|
pRegisterExport IN BOOLEAN default FALSE,
|
||||||
|
pProcessName IN VARCHAR2 default 'DATA_EXPORTER',
|
||||||
|
pJobClass IN VARCHAR2 default NULL,
|
||||||
pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
|
pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
|
||||||
);
|
);
|
||||||
|
|
||||||
|
|||||||
@@ -29,6 +29,7 @@ PROMPT MARS-835-PREHOOK: Rollback to Previous Versions
|
|||||||
PROMPT =========================================================================
|
PROMPT =========================================================================
|
||||||
PROMPT WARNING: This will reverse all changes from MARS-835-PREHOOK installation!
|
PROMPT WARNING: This will reverse all changes from MARS-835-PREHOOK installation!
|
||||||
PROMPT - Removes A_PARALLEL_EXPORT_CHUNKS table
|
PROMPT - Removes A_PARALLEL_EXPORT_CHUNKS table
|
||||||
|
PROMPT - Reverts A_SOURCE_FILE_RECEIVED table (rename ARCH_PATH to ARCH_FILE_NAME, drop PROCESS_NAME column)
|
||||||
PROMPT - Restores ENV_MANAGER v3.1.0 (removes parallel error codes)
|
PROMPT - Restores ENV_MANAGER v3.1.0 (removes parallel error codes)
|
||||||
PROMPT - Restores DATA_EXPORTER v2.1.0 (removes parallel + Smart Column Mapping)
|
PROMPT - Restores DATA_EXPORTER v2.1.0 (removes parallel + Smart Column Mapping)
|
||||||
PROMPT =========================================================================
|
PROMPT =========================================================================
|
||||||
@@ -65,13 +66,19 @@ PROMPT =========================================================================
|
|||||||
|
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT =========================================================================
|
PROMPT =========================================================================
|
||||||
PROMPT Step 3: Track Rollback Version
|
PROMPT Step 3: Rollback A_SOURCE_FILE_RECEIVED Table Structure
|
||||||
|
PROMPT =========================================================================
|
||||||
|
@@93_MARS_835_PREHOOK_rollback_SOURCE_FILE_RECEIVED_table.sql
|
||||||
|
|
||||||
|
PROMPT
|
||||||
|
PROMPT =========================================================================
|
||||||
|
PROMPT Step 4: Track Rollback Version
|
||||||
PROMPT =========================================================================
|
PROMPT =========================================================================
|
||||||
@@track_package_versions.sql
|
@@track_package_versions.sql
|
||||||
|
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT =========================================================================
|
PROMPT =========================================================================
|
||||||
PROMPT Step 4: Verify Package Versions After Rollback
|
PROMPT Step 5: Verify Package Versions After Rollback
|
||||||
PROMPT =========================================================================
|
PROMPT =========================================================================
|
||||||
@@verify_packages_version.sql
|
@@verify_packages_version.sql
|
||||||
|
|
||||||
|
|||||||
@@ -0,0 +1,29 @@
|
|||||||
|
-- ====================================================================
|
||||||
|
-- A_SOURCE_FILE_RECEIVED Table
|
||||||
|
-- ====================================================================
|
||||||
|
-- Purpose: Track received files and their processing status
|
||||||
|
-- ====================================================================
|
||||||
|
|
||||||
|
CREATE TABLE CT_MRDS.A_SOURCE_FILE_RECEIVED (
|
||||||
|
A_SOURCE_FILE_RECEIVED_KEY NUMBER(38,0) NOT NULL ENABLE,
|
||||||
|
A_SOURCE_FILE_CONFIG_KEY NUMBER(38,0) NOT NULL ENABLE,
|
||||||
|
SOURCE_FILE_NAME VARCHAR2(1000) NOT NULL,
|
||||||
|
CHECKSUM VARCHAR2(128),
|
||||||
|
CREATED TIMESTAMP(6) WITH TIME ZONE,
|
||||||
|
BYTES NUMBER,
|
||||||
|
RECEPTION_DATE DATE NOT NULL,
|
||||||
|
PROCESSING_STATUS VARCHAR2(200),
|
||||||
|
EXTERNAL_TABLE_NAME VARCHAR2(200),
|
||||||
|
PARTITION_YEAR VARCHAR2(4),
|
||||||
|
PARTITION_MONTH VARCHAR2(2),
|
||||||
|
ARCH_FILE_NAME VARCHAR2(1000),
|
||||||
|
CONSTRAINT A_SOURCE_FILE_RECEIVED_PK PRIMARY KEY (A_SOURCE_FILE_RECEIVED_KEY),
|
||||||
|
CONSTRAINT ASFR_A_SOURCE_FILE_CONFIG_KEY_FK FOREIGN KEY(A_SOURCE_FILE_CONFIG_KEY) REFERENCES CT_MRDS.A_SOURCE_FILE_CONFIG(A_SOURCE_FILE_CONFIG_KEY),
|
||||||
|
CONSTRAINT A_SOURCE_FILE_RECEIVED_CHK CHECK (PROCESSING_STATUS IN ('RECEIVED', 'VALIDATED', 'READY_FOR_INGESTION', 'INGESTED', 'ARCHIVED'))
|
||||||
|
) TABLESPACE "DATA";
|
||||||
|
|
||||||
|
-- Unique index for file identification (workaround for TIMESTAMP WITH TIMEZONE constraint limitation)
|
||||||
|
CREATE UNIQUE INDEX CT_MRDS.A_SOURCE_FILE_RECEIVED_UK1
|
||||||
|
ON CT_MRDS.A_SOURCE_FILE_RECEIVED(CHECKSUM, CREATED, BYTES);
|
||||||
|
|
||||||
|
GRANT SELECT, INSERT, UPDATE, DELETE ON CT_MRDS.A_SOURCE_FILE_RECEIVED TO MRDS_LOADER_ROLE;
|
||||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,218 @@
|
|||||||
|
create or replace PACKAGE CT_MRDS.DATA_EXPORTER
|
||||||
|
AUTHID CURRENT_USER
|
||||||
|
AS
|
||||||
|
/**
|
||||||
|
* Data Export Package: Provides comprehensive data export capabilities to various formats (CSV, Parquet)
|
||||||
|
* with support for cloud storage integration via Oracle Cloud Infrastructure (OCI).
|
||||||
|
* The structure of comment is used by GET_PACKAGE_DOCUMENTATION function
|
||||||
|
* which returns documentation text for confluence page (to Copy-Paste it).
|
||||||
|
**/
|
||||||
|
|
||||||
|
-- Package Version Information
|
||||||
|
PACKAGE_VERSION CONSTANT VARCHAR2(10) := '2.6.3';
|
||||||
|
PACKAGE_BUILD_DATE CONSTANT VARCHAR2(19) := '2026-01-28 19:30:00';
|
||||||
|
PACKAGE_AUTHOR CONSTANT VARCHAR2(50) := 'MRDS Development Team';
|
||||||
|
|
||||||
|
-- Version History (last 3-5 changes)
|
||||||
|
VERSION_HISTORY CONSTANT VARCHAR2(4000) :=
|
||||||
|
'v2.6.3 (2026-01-28): COMPILATION FIX - Resolved ORA-00904 error in EXPORT_PARTITION_PARALLEL. SQLERRM and DBMS_UTILITY.FORMAT_ERROR_BACKTRACE cannot be used directly in SQL UPDATE statements. Now properly assigned to vgMsgTmp variable before UPDATE.' || CHR(10) ||
|
||||||
|
'v2.6.2 (2026-01-28): CRITICAL FIX - Race condition when multiple exports run simultaneously. Changed DELETE to filter by age (>24h) instead of deleting all COMPLETED chunks. Prevents concurrent sessions from deleting each other chunks. Session-safe cleanup with TASK_NAME filtering. Enables true parallel execution of multiple export jobs.' || CHR(10) ||
|
||||||
|
'v2.6.1 (2026-01-28): Added DELETE_FAILED_EXPORT_FILE procedure to clean up partial/corrupted files before retry. When partition fails mid-export, partial file is deleted before retry to prevent Oracle from creating _1 suffixed duplicates. Ensures clean retry without orphaned files in OCI bucket.' || CHR(10) ||
|
||||||
|
'v2.6.0 (2026-01-28): CRITICAL FIX - Added STATUS tracking to A_PARALLEL_EXPORT_CHUNKS table to prevent data duplication on retry. System now restarts ONLY failed partitions instead of re-exporting all data. Added ERROR_MESSAGE and EXPORT_TIMESTAMP columns for better error handling and monitoring. Prevents duplicate file creation when parallel tasks fail (e.g., 22 partitions with 16 threads, 3 failures no longer duplicates 19 successful exports).' || CHR(10) ||
|
||||||
|
'v2.5.0 (2026-01-26): Added recorddelimiter parameter with CRLF (CHR(13)||CHR(10)) for CSV exports to ensure Windows-compatible line endings. Improves cross-platform compatibility when CSV files are opened in Windows applications (Notepad, Excel).' || CHR(10) ||
|
||||||
|
'v2.4.0 (2026-01-11): Added pTemplateTableName parameter for per-column date format configuration. Implements dynamic query building with TO_CHAR for each date/timestamp column using FILE_MANAGER.GET_DATE_FORMAT. Supports 3-tier hierarchy: column-specific, template DEFAULT, global fallback. Eliminates single dateformat limitation of DBMS_CLOUD.EXPORT_DATA.' || CHR(10) ||
|
||||||
|
'v2.3.0 (2025-12-20): Added parallel partition processing using DBMS_PARALLEL_EXECUTE. New pParallelDegree parameter (1-16, default 1) for EXPORT_TABLE_DATA_BY_DATE and EXPORT_TABLE_DATA_TO_CSV_BY_DATE procedures. Each year/month partition processed in separate thread for improved performance.' || CHR(10) ||
|
||||||
|
'v2.2.0 (2025-12-19): DRY refactoring - extracted shared helper functions (sanitizeFilename, VALIDATE_TABLE_AND_COLUMNS, GET_PARTITIONS, EXPORT_SINGLE_PARTITION worker procedure). Reduced code duplication by ~400 lines. Prepared architecture for v2.3.0 parallel processing.' || CHR(10) ||
|
||||||
|
'v2.1.1 (2025-12-04): Fixed JOIN column reference A_WORKFLOW_HISTORY_KEY -> A_ETL_LOAD_SET_KEY, added consistent column mapping and dynamic column list to EXPORT_TABLE_DATA procedure, enhanced DEBUG logging for all export operations' || CHR(10) ||
|
||||||
|
'v2.1.0 (2025-10-22): Added version tracking and PARTITION_YEAR/PARTITION_MONTH support' || CHR(10) ||
|
||||||
|
'v2.0.0 (2025-10-01): Separated export functionality from FILE_MANAGER package' || CHR(10);
|
||||||
|
|
||||||
|
cgBL CONSTANT VARCHAR2(2) := CHR(13)||CHR(10);
|
||||||
|
vgMsgTmp VARCHAR2(32000);
|
||||||
|
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
-- TYPE DEFINITIONS FOR PARTITION HANDLING
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Record type for year/month partition information
|
||||||
|
**/
|
||||||
|
TYPE partition_rec IS RECORD (
|
||||||
|
year VARCHAR2(4),
|
||||||
|
month VARCHAR2(2)
|
||||||
|
);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Table type for collection of partition records
|
||||||
|
**/
|
||||||
|
TYPE partition_tab IS TABLE OF partition_rec;
|
||||||
|
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
-- INTERNAL PARALLEL PROCESSING CALLBACK
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name EXPORT_PARTITION_PARALLEL
|
||||||
|
* @desc Internal callback procedure for DBMS_PARALLEL_EXECUTE.
|
||||||
|
* Processes single partition (year/month) chunk in parallel task.
|
||||||
|
* Called by DBMS_PARALLEL_EXECUTE framework for each chunk.
|
||||||
|
* This procedure is PUBLIC because DBMS_PARALLEL_EXECUTE requires it,
|
||||||
|
* but should NOT be called directly by external code.
|
||||||
|
* @param pStartId - Chunk start ID (CHUNK_ID from A_PARALLEL_EXPORT_CHUNKS table)
|
||||||
|
* @param pEndId - Chunk end ID (same as pStartId for single-row chunks)
|
||||||
|
**/
|
||||||
|
PROCEDURE EXPORT_PARTITION_PARALLEL (
|
||||||
|
pStartId IN NUMBER,
|
||||||
|
pEndId IN NUMBER
|
||||||
|
);
|
||||||
|
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
-- MAIN EXPORT PROCEDURES
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name EXPORT_TABLE_DATA
|
||||||
|
* @desc Wrapper procedure for DBMS_CLOUD.EXPORT_DATA.
|
||||||
|
* Exports data into CSV file on OCI infrustructure.
|
||||||
|
* pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
|
||||||
|
* @example
|
||||||
|
* begin
|
||||||
|
* DATA_EXPORTER.EXPORT_TABLE_DATA(
|
||||||
|
* pSchemaName => 'CT_MRDS',
|
||||||
|
* pTableName => 'MY_TABLE',
|
||||||
|
* pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
||||||
|
* pBucketArea => 'DATA',
|
||||||
|
* pFolderName => 'csv_exports'
|
||||||
|
* );
|
||||||
|
* end;
|
||||||
|
**/
|
||||||
|
PROCEDURE EXPORT_TABLE_DATA (
|
||||||
|
pSchemaName IN VARCHAR2,
|
||||||
|
pTableName IN VARCHAR2,
|
||||||
|
pKeyColumnName IN VARCHAR2,
|
||||||
|
pBucketArea IN VARCHAR2,
|
||||||
|
pFolderName IN VARCHAR2,
|
||||||
|
pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name EXPORT_TABLE_DATA_BY_DATE
|
||||||
|
* @desc Wrapper procedure for DBMS_CLOUD.EXPORT_DATA.
|
||||||
|
* Exports data into PARQUET files on OCI infrustructure.
|
||||||
|
* Each YEAR_MONTH pair goes to seperate file (implicit partitioning).
|
||||||
|
* Allows specifying custom column list or uses T.* if pColumnList is NULL.
|
||||||
|
* Validates that all columns in pColumnList exist in the target table.
|
||||||
|
* Automatically adds 'T.' prefix to column names in pColumnList.
|
||||||
|
* Supports parallel partition processing via pParallelDegree parameter (default 1, range 1-16).
|
||||||
|
* pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
|
||||||
|
* @example
|
||||||
|
* begin
|
||||||
|
* DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
|
||||||
|
* pSchemaName => 'CT_MRDS',
|
||||||
|
* pTableName => 'MY_TABLE',
|
||||||
|
* pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
||||||
|
* pBucketArea => 'DATA',
|
||||||
|
* pFolderName => 'parquet_exports',
|
||||||
|
* pColumnList => 'COLUMN1, COLUMN2, COLUMN3', -- Optional
|
||||||
|
* pMinDate => DATE '2024-01-01',
|
||||||
|
* pMaxDate => SYSDATE,
|
||||||
|
* pParallelDegree => 8 -- Optional, default 1, range 1-16
|
||||||
|
* );
|
||||||
|
* end;
|
||||||
|
**/
|
||||||
|
PROCEDURE EXPORT_TABLE_DATA_BY_DATE (
|
||||||
|
pSchemaName IN VARCHAR2,
|
||||||
|
pTableName IN VARCHAR2,
|
||||||
|
pKeyColumnName IN VARCHAR2,
|
||||||
|
pBucketArea IN VARCHAR2,
|
||||||
|
pFolderName IN VARCHAR2,
|
||||||
|
pColumnList IN VARCHAR2 default NULL,
|
||||||
|
pMinDate IN DATE default DATE '1900-01-01',
|
||||||
|
pMaxDate IN DATE default SYSDATE,
|
||||||
|
pParallelDegree IN NUMBER default 1,
|
||||||
|
pTemplateTableName IN VARCHAR2 default NULL,
|
||||||
|
pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name EXPORT_TABLE_DATA_TO_CSV_BY_DATE
|
||||||
|
* @desc Exports data to separate CSV files partitioned by year and month.
|
||||||
|
* Creates one CSV file for each year/month combination found in the data.
|
||||||
|
* Uses the same date filtering mechanism with CT_ODS.A_LOAD_HISTORY as EXPORT_TABLE_DATA_BY_DATE,
|
||||||
|
* but exports to CSV format instead of Parquet.
|
||||||
|
* Supports parallel partition processing via pParallelDegree parameter (1-16).
|
||||||
|
* File naming pattern: {pFileName}_YYYYMM.csv or {TABLENAME}_YYYYMM.csv (if pFileName is NULL)
|
||||||
|
* @example
|
||||||
|
* begin
|
||||||
|
* -- With custom filename
|
||||||
|
* DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
|
||||||
|
* pSchemaName => 'CT_MRDS',
|
||||||
|
* pTableName => 'MY_TABLE',
|
||||||
|
* pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
||||||
|
* pBucketArea => 'DATA',
|
||||||
|
* pFolderName => 'exports',
|
||||||
|
* pFileName => 'my_export.csv',
|
||||||
|
* pMinDate => DATE '2024-01-01',
|
||||||
|
* pMaxDate => SYSDATE,
|
||||||
|
* pParallelDegree => 8 -- Optional, default 1, range 1-16
|
||||||
|
* );
|
||||||
|
*
|
||||||
|
* -- With auto-generated filename (based on table name only)
|
||||||
|
* DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
|
||||||
|
* pSchemaName => 'OU_TOP',
|
||||||
|
* pTableName => 'AGGREGATED_ALLOTMENT',
|
||||||
|
* pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
||||||
|
* pBucketArea => 'ARCHIVE',
|
||||||
|
* pFolderName => 'exports',
|
||||||
|
* pMinDate => DATE '2025-09-01',
|
||||||
|
* pMaxDate => DATE '2025-09-17'
|
||||||
|
* );
|
||||||
|
* -- This will create files like: AGGREGATED_ALLOTMENT_202509.csv, etc.
|
||||||
|
* pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
|
||||||
|
* end;
|
||||||
|
**/
|
||||||
|
PROCEDURE EXPORT_TABLE_DATA_TO_CSV_BY_DATE (
|
||||||
|
pSchemaName IN VARCHAR2,
|
||||||
|
pTableName IN VARCHAR2,
|
||||||
|
pKeyColumnName IN VARCHAR2,
|
||||||
|
pBucketArea IN VARCHAR2,
|
||||||
|
pFolderName IN VARCHAR2,
|
||||||
|
pFileName IN VARCHAR2 DEFAULT NULL,
|
||||||
|
pColumnList IN VARCHAR2 default NULL,
|
||||||
|
pMinDate IN DATE default DATE '1900-01-01',
|
||||||
|
pMaxDate IN DATE default SYSDATE,
|
||||||
|
pParallelDegree IN NUMBER default 1,
|
||||||
|
pTemplateTableName IN VARCHAR2 default NULL,
|
||||||
|
pMaxFileSize IN NUMBER default 104857600,
|
||||||
|
pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
|
||||||
|
);
|
||||||
|
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
-- VERSION MANAGEMENT FUNCTIONS
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns the current package version number
|
||||||
|
* return: Version string in format X.Y.Z (e.g., '2.1.0')
|
||||||
|
**/
|
||||||
|
FUNCTION GET_VERSION RETURN VARCHAR2;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns comprehensive build information including version, date, and author
|
||||||
|
* return: Formatted string with complete build details
|
||||||
|
**/
|
||||||
|
FUNCTION GET_BUILD_INFO RETURN VARCHAR2;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns the version history with recent changes
|
||||||
|
* return: Multi-line string with version history
|
||||||
|
**/
|
||||||
|
FUNCTION GET_VERSION_HISTORY RETURN VARCHAR2;
|
||||||
|
|
||||||
|
END;
|
||||||
|
|
||||||
|
/
|
||||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,227 @@
|
|||||||
|
create or replace PACKAGE CT_MRDS.DATA_EXPORTER
|
||||||
|
AUTHID CURRENT_USER
|
||||||
|
AS
|
||||||
|
/**
|
||||||
|
* Data Export Package: Provides comprehensive data export capabilities to various formats (CSV, Parquet)
|
||||||
|
* with support for cloud storage integration via Oracle Cloud Infrastructure (OCI).
|
||||||
|
* The structure of comment is used by GET_PACKAGE_DOCUMENTATION function
|
||||||
|
* which returns documentation text for confluence page (to Copy-Paste it).
|
||||||
|
**/
|
||||||
|
|
||||||
|
-- Package Version Information
|
||||||
|
PACKAGE_VERSION CONSTANT VARCHAR2(10) := '2.7.4';
|
||||||
|
PACKAGE_BUILD_DATE CONSTANT VARCHAR2(20) := '2026-02-11 12:10:00';
|
||||||
|
PACKAGE_AUTHOR CONSTANT VARCHAR2(100) := 'Grzegorz Michalski';
|
||||||
|
|
||||||
|
-- Version History (last 3-5 changes)
|
||||||
|
VERSION_HISTORY CONSTANT VARCHAR2(4000) :=
|
||||||
|
'v2.7.4 (2026-02-11): ACTUAL FILENAME STORAGE - Store real filename with Oracle suffix in SOURCE_FILE_NAME instead of theoretical filename. Changes LIST_OBJECTS query to SELECT object_name and stores actual filename like LEGACY_DEBT_202508_1_20260211T111341375171Z.csv instead of LEGACY_DEBT_202508.csv. Enables accurate file tracking.' || CHR(10) ||
|
||||||
|
'v2.7.3 (2026-02-11): FIX LIKE pattern for DBMS_CLOUD.LIST_OBJECTS - Removed .csv extension from filename before pattern matching. Oracle EXPORT_DATA creates files with suffixes BEFORE .csv so LIKE pattern should be filename% not filename.csv%. Enables proper metadata retrieval (CHECKSUM, CREATED, BYTES).' || CHR(10) ||
|
||||||
|
'v2.7.2 (2026-02-11): FIX pRegisterExport in EXPORT_TABLE_DATA_TO_CSV_BY_DATE - Added missing pRegisterExport parameter to EXPORT_SINGLE_PARTITION call. Previously files were not registered because parameter was not passed through.' || CHR(10) ||
|
||||||
|
'v2.7.1 (2026-02-11): AUTO-LOOKUP A_SOURCE_FILE_CONFIG_KEY - Parse pFolderName to automatically find config key from A_SOURCE_FILE_CONFIG. Example: ODS/CSDB/CSDB_DEBT_DAILY extracts SOURCE_KEY=CSDB, TABLE_ID=CSDB_DEBT_DAILY.' || CHR(10) ||
|
||||||
|
'v2.7.0 (2026-02-10): Added pRegisterExport parameter to EXPORT_TABLE_DATA_TO_CSV_BY_DATE. When TRUE, registers each exported CSV file in A_SOURCE_FILE_RECEIVED with metadata from DBMS_CLOUD.LIST_OBJECTS. Enables file tracking and integrity verification.' || CHR(10);
|
||||||
|
'v2.6.3 (2026-01-28): COMPILATION FIX - Resolved ORA-00904 error in EXPORT_PARTITION_PARALLEL. SQLERRM and DBMS_UTILITY.FORMAT_ERROR_BACKTRACE cannot be used directly in SQL UPDATE statements. Now properly assigned to vgMsgTmp variable before UPDATE.' || CHR(10) ||
|
||||||
|
'v2.6.2 (2026-01-28): CRITICAL FIX - Race condition when multiple exports run simultaneously. Changed DELETE to filter by age (>24h) instead of deleting all COMPLETED chunks. Prevents concurrent sessions from deleting each other chunks. Session-safe cleanup with TASK_NAME filtering. Enables true parallel execution of multiple export jobs.' || CHR(10) ||
|
||||||
|
'v2.6.0 (2026-01-28): CRITICAL FIX - Added STATUS tracking to A_PARALLEL_EXPORT_CHUNKS table to prevent data duplication on retry. System now restarts ONLY failed partitions instead of re-exporting all data. Added ERROR_MESSAGE and EXPORT_TIMESTAMP columns for better error handling and monitoring. Prevents duplicate file creation when parallel tasks fail (e.g., 22 partitions with 16 threads, 3 failures no longer duplicates 19 successful exports).' || CHR(10) ||
|
||||||
|
'v2.5.0 (2026-01-26): Added recorddelimiter parameter with CRLF (CHR(13)||CHR(10)) for CSV exports to ensure Windows-compatible line endings. Improves cross-platform compatibility when CSV files are opened in Windows applications (Notepad, Excel).' || CHR(10) ||
|
||||||
|
'v2.4.0 (2026-01-11): Added pTemplateTableName parameter for per-column date format configuration. Implements dynamic query building with TO_CHAR for each date/timestamp column using FILE_MANAGER.GET_DATE_FORMAT. Supports 3-tier hierarchy: column-specific, template DEFAULT, global fallback. Eliminates single dateformat limitation of DBMS_CLOUD.EXPORT_DATA.' || CHR(10) ||
|
||||||
|
'v2.3.0 (2025-12-20): Added parallel partition processing using DBMS_PARALLEL_EXECUTE. New pParallelDegree parameter (1-16, default 1) for EXPORT_TABLE_DATA_BY_DATE and EXPORT_TABLE_DATA_TO_CSV_BY_DATE procedures. Each year/month partition processed in separate thread for improved performance.' || CHR(10) ||
|
||||||
|
'v2.2.0 (2025-12-19): DRY refactoring - extracted shared helper functions (sanitizeFilename, VALIDATE_TABLE_AND_COLUMNS, GET_PARTITIONS, EXPORT_SINGLE_PARTITION worker procedure). Reduced code duplication by ~400 lines. Prepared architecture for v2.3.0 parallel processing.' || CHR(10) ||
|
||||||
|
'v2.1.1 (2025-12-04): Fixed JOIN column reference A_WORKFLOW_HISTORY_KEY -> A_ETL_LOAD_SET_KEY, added consistent column mapping and dynamic column list to EXPORT_TABLE_DATA procedure, enhanced DEBUG logging for all export operations' || CHR(10) ||
|
||||||
|
'v2.1.0 (2025-10-22): Added version tracking and PARTITION_YEAR/PARTITION_MONTH support' || CHR(10) ||
|
||||||
|
'v2.0.0 (2025-10-01): Separated export functionality from FILE_MANAGER package' || CHR(10);
|
||||||
|
|
||||||
|
cgBL CONSTANT VARCHAR2(2) := CHR(13)||CHR(10);
|
||||||
|
vgMsgTmp VARCHAR2(32000);
|
||||||
|
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
-- TYPE DEFINITIONS FOR PARTITION HANDLING
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Record type for year/month partition information
|
||||||
|
**/
|
||||||
|
TYPE partition_rec IS RECORD (
|
||||||
|
year VARCHAR2(4),
|
||||||
|
month VARCHAR2(2)
|
||||||
|
);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Table type for collection of partition records
|
||||||
|
**/
|
||||||
|
TYPE partition_tab IS TABLE OF partition_rec;
|
||||||
|
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
-- INTERNAL PARALLEL PROCESSING CALLBACK
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name EXPORT_PARTITION_PARALLEL
|
||||||
|
* @desc Internal callback procedure for DBMS_PARALLEL_EXECUTE.
|
||||||
|
* Processes single partition (year/month) chunk in parallel task.
|
||||||
|
* Called by DBMS_PARALLEL_EXECUTE framework for each chunk.
|
||||||
|
* This procedure is PUBLIC because DBMS_PARALLEL_EXECUTE requires it,
|
||||||
|
* but should NOT be called directly by external code.
|
||||||
|
* @param pStartId - Chunk start ID (CHUNK_ID from A_PARALLEL_EXPORT_CHUNKS table)
|
||||||
|
* @param pEndId - Chunk end ID (same as pStartId for single-row chunks)
|
||||||
|
**/
|
||||||
|
PROCEDURE EXPORT_PARTITION_PARALLEL (
|
||||||
|
pStartId IN NUMBER,
|
||||||
|
pEndId IN NUMBER
|
||||||
|
);
|
||||||
|
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
-- MAIN EXPORT PROCEDURES
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name EXPORT_TABLE_DATA
|
||||||
|
* @desc Wrapper procedure for DBMS_CLOUD.EXPORT_DATA.
|
||||||
|
* Exports data into CSV file on OCI infrustructure.
|
||||||
|
* pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
|
||||||
|
* @example
|
||||||
|
* begin
|
||||||
|
* DATA_EXPORTER.EXPORT_TABLE_DATA(
|
||||||
|
* pSchemaName => 'CT_MRDS',
|
||||||
|
* pTableName => 'MY_TABLE',
|
||||||
|
* pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
||||||
|
* pBucketArea => 'DATA',
|
||||||
|
* pFolderName => 'csv_exports'
|
||||||
|
* );
|
||||||
|
* end;
|
||||||
|
**/
|
||||||
|
PROCEDURE EXPORT_TABLE_DATA (
|
||||||
|
pSchemaName IN VARCHAR2,
|
||||||
|
pTableName IN VARCHAR2,
|
||||||
|
pKeyColumnName IN VARCHAR2,
|
||||||
|
pBucketArea IN VARCHAR2,
|
||||||
|
pFolderName IN VARCHAR2,
|
||||||
|
pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
* @name EXPORT_TABLE_DATA_BY_DATE
* @desc Wrapper procedure for DBMS_CLOUD.EXPORT_DATA.
*       Exports data into PARQUET files on OCI infrastructure.
*       Each YEAR_MONTH pair goes to a separate file (implicit partitioning).
*       Allows specifying custom column list or uses T.* if pColumnList is NULL.
*       Validates that all columns in pColumnList exist in the target table.
*       Automatically adds 'T.' prefix to column names in pColumnList.
*       Supports parallel partition processing via pParallelDegree parameter (default 1, range 1-16).
*       pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
* @example
* begin
*   DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
*     pSchemaName     => 'CT_MRDS',
*     pTableName      => 'MY_TABLE',
*     pKeyColumnName  => 'A_ETL_LOAD_SET_KEY_FK',
*     pBucketArea     => 'DATA',
*     pFolderName     => 'parquet_exports',
*     pColumnList     => 'COLUMN1, COLUMN2, COLUMN3', -- Optional
*     pMinDate        => DATE '2024-01-01',
*     pMaxDate        => SYSDATE,
*     pParallelDegree => 8 -- Optional, default 1, range 1-16
*   );
* end;
**/
PROCEDURE EXPORT_TABLE_DATA_BY_DATE (
    pSchemaName        IN VARCHAR2,
    pTableName         IN VARCHAR2,
    pKeyColumnName     IN VARCHAR2,
    pBucketArea        IN VARCHAR2,
    pFolderName        IN VARCHAR2,
    pColumnList        IN VARCHAR2 default NULL,                            -- comma-separated columns; NULL means T.*
    pMinDate           IN DATE     default DATE '1900-01-01',               -- inclusive lower bound of export window
    pMaxDate           IN DATE     default SYSDATE,                         -- upper bound of export window
    pParallelDegree    IN NUMBER   default 1,                               -- 1..16 parallel partition workers
    pTemplateTableName IN VARCHAR2 default NULL,                            -- NOTE(review): optional template-table override; not described above — confirm intended use
    pCredentialName    IN VARCHAR2 default ENV_MANAGER.gvCredentialName
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
* @name EXPORT_TABLE_DATA_TO_CSV_BY_DATE
* @desc Exports data to separate CSV files partitioned by year and month.
*       Creates one CSV file for each year/month combination found in the data.
*       Uses the same date filtering mechanism with CT_ODS.A_LOAD_HISTORY as EXPORT_TABLE_DATA_BY_DATE,
*       but exports to CSV format instead of Parquet.
*       Supports parallel partition processing via pParallelDegree parameter (1-16).
*       File naming pattern: {pFileName}_YYYYMM.csv or {TABLENAME}_YYYYMM.csv (if pFileName is NULL)
*       pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
*       When pRegisterExport=TRUE, successfully exported files are registered in:
*       - CT_MRDS.A_SOURCE_FILE_RECEIVED (tracks file location, size, checksum, and metadata)
* @example
* begin
*   -- With custom filename
*   DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
*     pSchemaName     => 'CT_MRDS',
*     pTableName      => 'MY_TABLE',
*     pKeyColumnName  => 'A_ETL_LOAD_SET_KEY_FK',
*     pBucketArea     => 'DATA',
*     pFolderName     => 'exports',
*     pFileName       => 'my_export.csv',
*     pMinDate        => DATE '2024-01-01',
*     pMaxDate        => SYSDATE,
*     pParallelDegree => 8,    -- Optional, default 1, range 1-16
*     pRegisterExport => TRUE  -- Optional, default FALSE, registers to A_SOURCE_FILE_RECEIVED
*   );
*
*   -- With auto-generated filename (based on table name only)
*   DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
*     pSchemaName     => 'OU_TOP',
*     pTableName      => 'AGGREGATED_ALLOTMENT',
*     pKeyColumnName  => 'A_ETL_LOAD_SET_KEY_FK',
*     pBucketArea     => 'ARCHIVE',
*     pFolderName     => 'exports',
*     pMinDate        => DATE '2025-09-01',
*     pMaxDate        => DATE '2025-09-17',
*     pRegisterExport => TRUE  -- Registers each export to A_SOURCE_FILE_RECEIVED table
*   );
*   -- This will create files like: AGGREGATED_ALLOTMENT_202509.csv, etc.
* end;
**/
PROCEDURE EXPORT_TABLE_DATA_TO_CSV_BY_DATE (
    pSchemaName        IN VARCHAR2,
    pTableName         IN VARCHAR2,
    pKeyColumnName     IN VARCHAR2,
    pBucketArea        IN VARCHAR2,
    pFolderName        IN VARCHAR2,
    pFileName          IN VARCHAR2 DEFAULT NULL,                            -- NULL => auto-generate from table name
    pColumnList        IN VARCHAR2 default NULL,                            -- comma-separated columns; NULL means all
    pMinDate           IN DATE     default DATE '1900-01-01',
    pMaxDate           IN DATE     default SYSDATE,
    pParallelDegree    IN NUMBER   default 1,                               -- 1..16 parallel partition workers
    pTemplateTableName IN VARCHAR2 default NULL,                            -- NOTE(review): optional template-table override — confirm intended use
    pMaxFileSize       IN NUMBER   default 104857600,                       -- max file size in bytes (100 MiB)
    pRegisterExport    IN BOOLEAN  default FALSE,                           -- TRUE => register each file in A_SOURCE_FILE_RECEIVED
    pCredentialName    IN VARCHAR2 default ENV_MANAGER.gvCredentialName
);
|
||||||
|
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
-- VERSION MANAGEMENT FUNCTIONS
---------------------------------------------------------------------------------------------------------------------------

/**
* Returns the current package version number
* return: Version string in format X.Y.Z (e.g., '2.1.0')
**/
FUNCTION GET_VERSION RETURN VARCHAR2;

/**
* Returns comprehensive build information including version, date, and author
* return: Formatted string with complete build details
**/
FUNCTION GET_BUILD_INFO RETURN VARCHAR2;

/**
* Returns the version history with recent changes
* return: Multi-line string with version history
**/
FUNCTION GET_VERSION_HISTORY RETURN VARCHAR2;
|
||||||
|
|
||||||
|
END;
|
||||||
|
|
||||||
|
/
|
||||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,625 @@
|
|||||||
|
create or replace PACKAGE CT_MRDS.ENV_MANAGER
|
||||||
|
AUTHID CURRENT_USER
|
||||||
|
AS
|
||||||
|
/**
|
||||||
|
* General comment for package: Please put comments for functions and procedures as shown in below example.
|
||||||
|
* It is a standard.
|
||||||
|
* The structure of comment is used by GET_PACKAGE_DOCUMENTATION function
|
||||||
|
* which returns documentation text for confluence page (to Copy-Paste it).
|
||||||
|
**/
|
||||||
|
|
||||||
|
-- Example comment:
|
||||||
|
/**
|
||||||
|
* @name EX_PROCEDURE_NAME
|
||||||
|
* @desc Procedure description
|
||||||
|
* @example select ENV_MANAGER.EX_PROCEDURE_NAME(pParameter => 129) from dual;
|
||||||
|
* @ex_rslt Example Result
|
||||||
|
**/
|
||||||
|
|
||||||
|
-- Package Version Information (Semantic Versioning: MAJOR.MINOR.PATCH)
PACKAGE_VERSION    CONSTANT VARCHAR2(10)  := '3.2.0';
PACKAGE_BUILD_DATE CONSTANT VARCHAR2(20)  := '2025-12-20 10:00:00';
PACKAGE_AUTHOR     CONSTANT VARCHAR2(100) := 'Grzegorz Michalski';

-- Version History (Latest changes first)
VERSION_HISTORY CONSTANT VARCHAR2(4000) :=
'3.2.0 (2025-12-20): Added error codes for parallel execution support (CODE_INVALID_PARALLEL_DEGREE -20110, CODE_PARALLEL_EXECUTION_FAILED -20111)' || CHR(13)||CHR(10) ||
'3.1.0 (2025-10-22): Added package hash tracking and automatic change detection system (SHA256 hashing)' || CHR(13)||CHR(10) ||
'3.0.0 (2025-10-22): Added package versioning system with centralized version management functions' || CHR(13)||CHR(10) ||
'2.1.0 (2025-10-15): Added ANALYZE_VALIDATION_ERRORS function for comprehensive CSV validation analysis' || CHR(13)||CHR(10) ||
'2.0.0 (2025-10-01): Added LOG_PROCESS_ERROR procedure with enhanced error diagnostics and stack traces' || CHR(13)||CHR(10) ||
'1.5.0 (2025-09-20): Added console logging support with gvConsoleLoggingEnabled configuration' || CHR(13)||CHR(10) ||
'1.0.0 (2025-09-01): Initial release with error management and configuration system';
|
||||||
|
|
||||||
|
-- Error catalogue entry: numeric ORA code plus its default message text.
TYPE Error_Record IS RECORD (
    code    PLS_INTEGER,
    message VARCHAR2(4000)
);

-- Associative array of Error_Record indexed by error code; filled by INIT_ERRORS.
TYPE tErrorList IS TABLE OF Error_Record INDEX BY PLS_INTEGER;

Errors tErrorList;

-- Session/identity context captured for logging.
guid       VARCHAR2(32);
gvEnv      VARCHAR2(200);   -- current environment id (e.g. 'dev')
gvUsername VARCHAR2(128);
gvOsuser   VARCHAR2(128);
gvMachine  VARCHAR2(64);
gvModule   VARCHAR2(64);

-- OCI object-storage configuration; populated by INIT_VARIABLES from A_FILE_MANAGER_CONFIG.
gvNameSpace         VARCHAR2(200);
gvRegion            VARCHAR2(200);
gvDataBucketName    VARCHAR2(200);
gvInboxBucketName   VARCHAR2(200);
gvArchiveBucketName VARCHAR2(200);
gvDataBucketUri     VARCHAR2(200);
gvInboxBucketUri    VARCHAR2(200);
gvArchiveBucketUri  VARCHAR2(200);
gvCredentialName    VARCHAR2(200);

-- Overwritten by variable "LoggingEnabled" in A_FILE_MANAGER_CONFIG.CONFIG_VARIABLE table
gvLoggingEnabled VARCHAR2(3) := 'ON'; -- 'ON' or 'OFF'

-- Overwritten by variable "MinLogLevel" in A_FILE_MANAGER_CONFIG.CONFIG_VARIABLE table
-- Possible values: DEBUG ,INFO ,WARNING ,ERROR
gvMinLogLevel VARCHAR2(10) := 'DEBUG';

-- Overwritten by variable "DefaultDateFormat" in A_FILE_MANAGER_CONFIG.CONFIG_VARIABLE table
gvDefaultDateFormat VARCHAR2(200) := 'DD/MM/YYYY HH24:MI:SS';

-- Overwritten by variable "ConsoleLoggingEnabled" in A_FILE_MANAGER_CONFIG.CONFIG_VARIABLE table
gvConsoleLoggingEnabled VARCHAR2(3) := 'ON'; -- 'ON' or 'OFF'

-- Line break (CRLF) used when composing multi-line messages.
cgBL CONSTANT VARCHAR2(2) := CHR(13)||CHR(10);

vgSourceFileConfigKey PLS_INTEGER;

vgMsgTmp VARCHAR2(32000);   -- scratch buffer for message assembly
|
||||||
|
-- Exceptions
-- Each error is declared as a triple: ERR_* (named exception), CODE_* (ORA error
-- number in the user range -20001..-20999) and MSG_* (default message text).
-- PRAGMA EXCEPTION_INIT binds the exception name to its code so callers can
-- catch it by name after RAISE_APPLICATION_ERROR(CODE_*, MSG_*).
ERR_EMPTY_FILEURI_AND_RECKEY EXCEPTION;
CODE_EMPTY_FILEURI_AND_RECKEY CONSTANT PLS_INTEGER := -20001;
MSG_EMPTY_FILEURI_AND_RECKEY VARCHAR2(4000) := 'Either pFileUri or pSourceFileReceivedKey must be not null';
PRAGMA EXCEPTION_INIT( ERR_EMPTY_FILEURI_AND_RECKEY
                      ,CODE_EMPTY_FILEURI_AND_RECKEY);

ERR_NO_CONFIG_MATCH_FOR_FILEURI EXCEPTION;
CODE_NO_CONFIG_MATCH_FOR_FILEURI CONSTANT PLS_INTEGER := -20002;
MSG_NO_CONFIG_MATCH_FOR_FILEURI VARCHAR2(4000) := 'No match for source file in A_SOURCE_FILE_CONFIG table'
    ||cgBL||' The file provided in parameter: pFileUri does not have '
    ||cgBL||' coresponding configuration in A_SOURCE_FILE_CONFIG table';
PRAGMA EXCEPTION_INIT( ERR_NO_CONFIG_MATCH_FOR_FILEURI
                      ,CODE_NO_CONFIG_MATCH_FOR_FILEURI);

ERR_MULTIPLE_MATCH_FOR_SRCFILE EXCEPTION;
CODE_MULTIPLE_MATCH_FOR_SRCFILE CONSTANT PLS_INTEGER := -20003;
MSG_MULTIPLE_MATCH_FOR_SRCFILE VARCHAR2(4000) := 'Multiple match for source file in A_SOURCE_FILE_CONFIG table';
PRAGMA EXCEPTION_INIT( ERR_MULTIPLE_MATCH_FOR_SRCFILE
                      ,CODE_MULTIPLE_MATCH_FOR_SRCFILE);

ERR_MISSING_COLUMN_DATE_FORMAT EXCEPTION;
CODE_MISSING_COLUMN_DATE_FORMAT CONSTANT PLS_INTEGER := -20004;
MSG_MISSING_COLUMN_DATE_FORMAT VARCHAR2(4000) := 'Missing entry in config table: A_COLUMN_DATE_FORMAT primary key(TEMPLATE_TABLE_NAME, COLUMN_NAME)'
    ||cgBL||' Remember: each column which data_type IN (''DATE'', ''TIMESTAMP'')'
    ||cgBL||' should have DateFormat specified in A_COLUMN_DATE_FORMAT table '
    ||cgBL||' for example: ''YYYY-MM-DD''';
PRAGMA EXCEPTION_INIT( ERR_MISSING_COLUMN_DATE_FORMAT
                      ,CODE_MISSING_COLUMN_DATE_FORMAT);

ERR_MULTIPLE_COLUMN_DATE_FORMAT EXCEPTION;
CODE_MULTIPLE_COLUMN_DATE_FORMAT CONSTANT PLS_INTEGER := -20005;
MSG_MULTIPLE_COLUMN_DATE_FORMAT VARCHAR2(4000) := 'Multiple records for date format in A_COLUMN_DATE_FORMAT table'
    ||cgBL||' There should be only one format specified for each DAT/TIMESTAMP column';
PRAGMA EXCEPTION_INIT( ERR_MULTIPLE_COLUMN_DATE_FORMAT
                      ,CODE_MULTIPLE_COLUMN_DATE_FORMAT);

ERR_DIDNT_GET_LOAD_OPERATION_ID EXCEPTION;
CODE_DIDNT_GET_LOAD_OPERATION_ID CONSTANT PLS_INTEGER := -20006;
MSG_DIDNT_GET_LOAD_OPERATION_ID VARCHAR2(4000) := 'Didnt get load operation id from external table validation';
PRAGMA EXCEPTION_INIT( ERR_DIDNT_GET_LOAD_OPERATION_ID
                      ,CODE_DIDNT_GET_LOAD_OPERATION_ID);

ERR_NO_CONFIG_FOR_RECEIVED_FILE EXCEPTION;
CODE_NO_CONFIG_FOR_RECEIVED_FILE CONSTANT PLS_INTEGER := -20007;
MSG_NO_CONFIG_FOR_RECEIVED_FILE VARCHAR2(4000) := 'No match for received source file in A_SOURCE_FILE_CONFIG '
    ||cgBL||' or missing data in A_SOURCE_FILE_RECEIVED table for provided pSourceFileReceivedKey parameter';
PRAGMA EXCEPTION_INIT( ERR_NO_CONFIG_FOR_RECEIVED_FILE
                      ,CODE_NO_CONFIG_FOR_RECEIVED_FILE);

ERR_MULTI_CONFIG_FOR_RECEIVED_FILE EXCEPTION;
CODE_MULTI_CONFIG_FOR_RECEIVED_FILE CONSTANT PLS_INTEGER := -20008;
MSG_MULTI_CONFIG_FOR_RECEIVED_FILE VARCHAR2(4000) := 'Multiple matchs for received source file in A_SOURCE_FILE_CONFIG';
PRAGMA EXCEPTION_INIT( ERR_MULTI_CONFIG_FOR_RECEIVED_FILE
                      ,CODE_MULTI_CONFIG_FOR_RECEIVED_FILE);

ERR_FILE_NOT_FOUND_ON_CLOUD EXCEPTION;
CODE_FILE_NOT_FOUND_ON_CLOUD CONSTANT PLS_INTEGER := -20009;
MSG_FILE_NOT_FOUND_ON_CLOUD VARCHAR2(4000) := 'File not found on the cloud';
PRAGMA EXCEPTION_INIT( ERR_FILE_NOT_FOUND_ON_CLOUD
                      ,CODE_FILE_NOT_FOUND_ON_CLOUD);

ERR_FILE_VALIDATION_FAILED EXCEPTION;
CODE_FILE_VALIDATION_FAILED CONSTANT PLS_INTEGER := -20010;
MSG_FILE_VALIDATION_FAILED VARCHAR2(4000) := 'File validation failed';
PRAGMA EXCEPTION_INIT( ERR_FILE_VALIDATION_FAILED
                      ,CODE_FILE_VALIDATION_FAILED);

ERR_EXCESS_COLUMNS_DETECTED EXCEPTION;
CODE_EXCESS_COLUMNS_DETECTED CONSTANT PLS_INTEGER := -20011;
MSG_EXCESS_COLUMNS_DETECTED VARCHAR2(4000) := 'CSV file contains more columns than template allows';
PRAGMA EXCEPTION_INIT( ERR_EXCESS_COLUMNS_DETECTED
                      ,CODE_EXCESS_COLUMNS_DETECTED);

ERR_NO_CONFIG_MATCH EXCEPTION;
CODE_NO_CONFIG_MATCH CONSTANT PLS_INTEGER := -20012;
MSG_NO_CONFIG_MATCH VARCHAR2(4000) := 'No match for specified parameters in A_SOURCE_FILE_CONFIG table';
PRAGMA EXCEPTION_INIT( ERR_NO_CONFIG_MATCH
                      ,CODE_NO_CONFIG_MATCH);

ERR_UNKNOWN_PREFIX EXCEPTION;
CODE_UNKNOWN_PREFIX CONSTANT PLS_INTEGER := -20013;
MSG_UNKNOWN_PREFIX VARCHAR2(4000) := 'Unknown prefix';
PRAGMA EXCEPTION_INIT( ERR_UNKNOWN_PREFIX
                      ,CODE_UNKNOWN_PREFIX);

ERR_TABLE_NOT_EXISTS EXCEPTION;
CODE_TABLE_NOT_EXISTS CONSTANT PLS_INTEGER := -20014;
MSG_TABLE_NOT_EXISTS VARCHAR2(4000) := 'Table does not exist';
PRAGMA EXCEPTION_INIT( ERR_TABLE_NOT_EXISTS
                      ,CODE_TABLE_NOT_EXISTS);

ERR_COLUMN_NOT_EXISTS EXCEPTION;
CODE_COLUMN_NOT_EXISTS CONSTANT PLS_INTEGER := -20015;
MSG_COLUMN_NOT_EXISTS VARCHAR2(4000) := 'Column does not exist in table';
PRAGMA EXCEPTION_INIT( ERR_COLUMN_NOT_EXISTS
                      ,CODE_COLUMN_NOT_EXISTS);

ERR_UNSUPPORTED_DATA_TYPE EXCEPTION;
CODE_UNSUPPORTED_DATA_TYPE CONSTANT PLS_INTEGER := -20016;
MSG_UNSUPPORTED_DATA_TYPE VARCHAR2(4000) := 'Unsupported data type';
PRAGMA EXCEPTION_INIT( ERR_UNSUPPORTED_DATA_TYPE
                      ,CODE_UNSUPPORTED_DATA_TYPE);

ERR_MISSING_SOURCE_KEY EXCEPTION;
CODE_MISSING_SOURCE_KEY CONSTANT PLS_INTEGER := -20017;
MSG_MISSING_SOURCE_KEY VARCHAR2(4000) := 'The Source was not found in parent table A_SOURCE';
PRAGMA EXCEPTION_INIT( ERR_MISSING_SOURCE_KEY
                      ,CODE_MISSING_SOURCE_KEY);

ERR_NULL_SOURCE_FILE_CONFIG_KEY EXCEPTION;
CODE_NULL_SOURCE_FILE_CONFIG_KEY CONSTANT PLS_INTEGER := -20018;
MSG_NULL_SOURCE_FILE_CONFIG_KEY VARCHAR2(4000) := 'No entry in A_SOURCE_FILE_CONFIG table for specified A_SOURCE_FILE_CONFIG_KEY';
PRAGMA EXCEPTION_INIT( ERR_NULL_SOURCE_FILE_CONFIG_KEY
                      ,CODE_NULL_SOURCE_FILE_CONFIG_KEY);

ERR_DUPLICATED_SOURCE_KEY EXCEPTION;
CODE_DUPLICATED_SOURCE_KEY CONSTANT PLS_INTEGER := -20019;
MSG_DUPLICATED_SOURCE_KEY VARCHAR2(4000) := 'The Source already exists in the A_SOURCE table';
PRAGMA EXCEPTION_INIT( ERR_DUPLICATED_SOURCE_KEY
                      ,CODE_DUPLICATED_SOURCE_KEY);

ERR_MISSING_CONTAINER_CONFIG EXCEPTION;
CODE_MISSING_CONTAINER_CONFIG CONSTANT PLS_INTEGER := -20020;
MSG_MISSING_CONTAINER_CONFIG VARCHAR2(4000) := 'No match in A_SOURCE_FILE_CONFIG table where SOURCE_FILE_TYPE=''CONTAINER'' and specified SOURCE_FILE_ID';
PRAGMA EXCEPTION_INIT( ERR_MISSING_CONTAINER_CONFIG
                      ,CODE_MISSING_CONTAINER_CONFIG);

ERR_MULTIPLE_CONTAINER_ENTRIES EXCEPTION;
CODE_MULTIPLE_CONTAINER_ENTRIES CONSTANT PLS_INTEGER := -20021;
MSG_MULTIPLE_CONTAINER_ENTRIES VARCHAR2(4000) := 'Multiple matches in A_SOURCE_FILE_CONFIG table where SOURCE_FILE_TYPE=''CONTAINER'' and specified SOURCE_FILE_ID';
PRAGMA EXCEPTION_INIT( ERR_MULTIPLE_CONTAINER_ENTRIES
                      ,CODE_MULTIPLE_CONTAINER_ENTRIES);

ERR_WRONG_DESTINATION_PARAM EXCEPTION;
CODE_WRONG_DESTINATION_PARAM CONSTANT PLS_INTEGER := -20022;
MSG_WRONG_DESTINATION_PARAM VARCHAR2(4000) := 'Wrong destination parameter provided.';
PRAGMA EXCEPTION_INIT( ERR_WRONG_DESTINATION_PARAM
                      ,CODE_WRONG_DESTINATION_PARAM);

ERR_FILE_NOT_EXISTS_ON_CLOUD EXCEPTION;
CODE_FILE_NOT_EXISTS_ON_CLOUD CONSTANT PLS_INTEGER := -20023;
MSG_FILE_NOT_EXISTS_ON_CLOUD VARCHAR2(4000) := 'File not exists on cloud.';
PRAGMA EXCEPTION_INIT( ERR_FILE_NOT_EXISTS_ON_CLOUD
                      ,CODE_FILE_NOT_EXISTS_ON_CLOUD);

ERR_FILE_ALREADY_REGISTERED EXCEPTION;
CODE_FILE_ALREADY_REGISTERED CONSTANT PLS_INTEGER := -20024;
MSG_FILE_ALREADY_REGISTERED VARCHAR2(4000) := 'File already registered in A_SOURCE_FILE_RECEIVED table.';
PRAGMA EXCEPTION_INIT( ERR_FILE_ALREADY_REGISTERED
                      ,CODE_FILE_ALREADY_REGISTERED);

ERR_WRONG_DATE_TIMESTAMP_FORMAT EXCEPTION;
CODE_WRONG_DATE_TIMESTAMP_FORMAT CONSTANT PLS_INTEGER := -20025;
MSG_WRONG_DATE_TIMESTAMP_FORMAT VARCHAR2(4000) := 'Provided DATE or TIMESTAMP format has errors (possible duplicated codes, ex: ''DD'').';
PRAGMA EXCEPTION_INIT( ERR_WRONG_DATE_TIMESTAMP_FORMAT
                      ,CODE_WRONG_DATE_TIMESTAMP_FORMAT);

ERR_ENVIRONMENT_NOT_SET EXCEPTION;
CODE_ENVIRONMENT_NOT_SET CONSTANT PLS_INTEGER := -20026;
MSG_ENVIRONMENT_NOT_SET VARCHAR2(4000) := 'EnvironmentID not set'
    ||cgBL||' Information about environment is needed to get proper configuration values.'
    ||cgBL||' It can be set up in two different ways:'
    ||cgBL||' 1. Set it on session level: execute DBMS_SESSION.SET_IDENTIFIER (client_id => ''dev'')'
    ||cgBL||' 2. Set it on configuration level: Insert into CT_MRDS.A_FILE_MANAGER_CONFIG (ENVIRONMENT_ID,CONFIG_VARIABLE,CONFIG_VARIABLE_VALUE) values (''default'',''environment_id'',''dev'')'
    ||cgBL||' Session level setup (1.) takes precedence over configuration level one (2.)'
    ;
PRAGMA EXCEPTION_INIT( ERR_ENVIRONMENT_NOT_SET
                      ,CODE_ENVIRONMENT_NOT_SET);

ERR_CONFIG_VARIABLE_NOT_SET EXCEPTION;
CODE_CONFIG_VARIABLE_NOT_SET CONSTANT PLS_INTEGER := -20027;
MSG_CONFIG_VARIABLE_NOT_SET VARCHAR2(4000) := 'Missing configuration value in A_FILE_MANAGER_CONFIG';
PRAGMA EXCEPTION_INIT( ERR_CONFIG_VARIABLE_NOT_SET
                      ,CODE_CONFIG_VARIABLE_NOT_SET);

ERR_NOT_INPUT_SOURCE_FILE_TYPE EXCEPTION;
CODE_NOT_INPUT_SOURCE_FILE_TYPE CONSTANT PLS_INTEGER := -20028;
MSG_NOT_INPUT_SOURCE_FILE_TYPE VARCHAR2(4000) := 'Archival can be executed only for A_SOURCE_FILE_CONFIG_KEY where SOURCE_FILE_TYPE=''INPUT''';
PRAGMA EXCEPTION_INIT( ERR_NOT_INPUT_SOURCE_FILE_TYPE
                      ,CODE_NOT_INPUT_SOURCE_FILE_TYPE);

ERR_EXP_DATA_FOR_ARCH_FAILED EXCEPTION;
CODE_EXP_DATA_FOR_ARCH_FAILED CONSTANT PLS_INTEGER := -20029;
MSG_EXP_DATA_FOR_ARCH_FAILED VARCHAR2(4000) := 'Export data for archival failed.';
PRAGMA EXCEPTION_INIT( ERR_EXP_DATA_FOR_ARCH_FAILED
                      ,CODE_EXP_DATA_FOR_ARCH_FAILED);

ERR_RESTORE_FILE_FROM_TRASH EXCEPTION;
CODE_RESTORE_FILE_FROM_TRASH CONSTANT PLS_INTEGER := -20030;
MSG_RESTORE_FILE_FROM_TRASH VARCHAR2(4000) := 'Unexpected issues occured while archival process. Restoration of exported files failed.';
PRAGMA EXCEPTION_INIT( ERR_RESTORE_FILE_FROM_TRASH
                      ,CODE_RESTORE_FILE_FROM_TRASH);

ERR_CHANGE_STAT_TO_ARCHIVED_FAILED EXCEPTION;
CODE_CHANGE_STAT_TO_ARCHIVED_FAILED CONSTANT PLS_INTEGER := -20031;
MSG_CHANGE_STAT_TO_ARCHIVED_FAILED VARCHAR2(4000) := 'Failed to change file status to: ARCHIVED in A_SOURCE_FILE_RECEIVED table.';
PRAGMA EXCEPTION_INIT( ERR_CHANGE_STAT_TO_ARCHIVED_FAILED
                      ,CODE_CHANGE_STAT_TO_ARCHIVED_FAILED);

ERR_MOVE_FILE_TO_TRASH_FAILED EXCEPTION;
CODE_MOVE_FILE_TO_TRASH_FAILED CONSTANT PLS_INTEGER := -20032;
MSG_MOVE_FILE_TO_TRASH_FAILED VARCHAR2(4000) := 'FAILED to move file to TRASH before DROPPING it.';
PRAGMA EXCEPTION_INIT( ERR_MOVE_FILE_TO_TRASH_FAILED
                      ,CODE_MOVE_FILE_TO_TRASH_FAILED);
|
||||||
|
|
||||||
|
-- Raised when dropping already-exported files from object storage fails.
ERR_DROP_EXPORTED_FILES_FAILED EXCEPTION;
CODE_DROP_EXPORTED_FILES_FAILED CONSTANT PLS_INTEGER := -20033;
-- BUGFIX: the message was a copy-paste of MSG_MOVE_FILE_TO_TRASH_FAILED (-20032,
-- 'FAILED to move file to TRASH before DROPPING it.') and did not describe this
-- error; it now reports the actual failure (dropping the exported files).
MSG_DROP_EXPORTED_FILES_FAILED VARCHAR2(4000) := 'FAILED to DROP exported files.';
PRAGMA EXCEPTION_INIT( ERR_DROP_EXPORTED_FILES_FAILED
                      ,CODE_DROP_EXPORTED_FILES_FAILED);
|
||||||
|
|
||||||
|
ERR_INVALID_BUCKET_AREA EXCEPTION;
CODE_INVALID_BUCKET_AREA CONSTANT PLS_INTEGER := -20034;
MSG_INVALID_BUCKET_AREA VARCHAR2(4000) := 'Invalid bucket area specified. Valid values: INBOX, ODS, DATA, ARCHIVE';
PRAGMA EXCEPTION_INIT( ERR_INVALID_BUCKET_AREA
                      ,CODE_INVALID_BUCKET_AREA);

-- Parallel-execution errors use a separate -201xx range (added in version 3.2.0,
-- see VERSION_HISTORY above).
ERR_INVALID_PARALLEL_DEGREE EXCEPTION;
CODE_INVALID_PARALLEL_DEGREE CONSTANT PLS_INTEGER := -20110;
MSG_INVALID_PARALLEL_DEGREE VARCHAR2(4000) := 'Invalid parallel degree parameter. Must be between 1 and 16';
PRAGMA EXCEPTION_INIT( ERR_INVALID_PARALLEL_DEGREE
                      ,CODE_INVALID_PARALLEL_DEGREE);

ERR_PARALLEL_EXECUTION_FAILED EXCEPTION;
CODE_PARALLEL_EXECUTION_FAILED CONSTANT PLS_INTEGER := -20111;
MSG_PARALLEL_EXECUTION_FAILED VARCHAR2(4000) := 'Parallel execution failed';
PRAGMA EXCEPTION_INIT( ERR_PARALLEL_EXECUTION_FAILED
                      ,CODE_PARALLEL_EXECUTION_FAILED);

-- Catch-all error for unclassified failures.
-- NOTE(review): 'Occured' typo in the message literal is left untouched here —
-- it is runtime text; fix it in a dedicated change if callers do not match on it.
ERR_UNKNOWN EXCEPTION;
CODE_UNKNOWN CONSTANT PLS_INTEGER := -20999;
MSG_UNKNOWN VARCHAR2(4000) := 'Unknown Error Occured';
PRAGMA EXCEPTION_INIT( ERR_UNKNOWN
                      ,CODE_UNKNOWN);
|
||||||
|
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
* @name LOG_PROCESS_EVENT
* @desc Insert a new log record into A_PROCESS_LOG table.
*       Also outputs to console if gvConsoleLoggingEnabled = 'ON'.
*       Respects logging level configuration (gvMinLogLevel).
* @example ENV_MANAGER.LOG_PROCESS_EVENT('Process completed successfully', 'INFO', 'pParam1=value1');
* @ex_rslt Record inserted into A_PROCESS_LOG table and optionally displayed in console output
**/
PROCEDURE LOG_PROCESS_EVENT (
     pLogMessage  VARCHAR2                          -- message text to log
    ,pLogLevel    VARCHAR2 DEFAULT 'ERROR'          -- DEBUG / INFO / WARNING / ERROR
    ,pParameters  VARCHAR2 DEFAULT NULL             -- caller parameters, for context
    ,pProcessName VARCHAR2 DEFAULT 'FILE_MANAGER'   -- name of the logging process/package
);
|
||||||
|
|
||||||
|
/**
* @name LOG_PROCESS_ERROR
* @desc Insert a detailed error record into A_PROCESS_LOG table with full stack trace, backtrace, and call stack.
*       This procedure captures comprehensive error information for debugging purposes while
*       allowing clean user-facing error messages to be raised separately.
* @param pLogMessage - Base error message description
* @param pParameters - Procedure parameters for context
* @param pProcessName - Name of the calling process/package
* @ex_rslt Record inserted into A_PROCESS_LOG table with complete error stack information
**/
PROCEDURE LOG_PROCESS_ERROR (
     pLogMessage  VARCHAR2
    ,pParameters  VARCHAR2 DEFAULT NULL
    ,pProcessName VARCHAR2 DEFAULT 'FILE_MANAGER'
);
|
||||||
|
|
||||||
|
/**
* @name INIT_ERRORS
* @desc Loads data into Errors array.
*       Errors array is a list of Record(Error_Code, Error_Message) indexed by Error_Code.
*       Called automatically during package initialization.
* @example Called automatically when package is first referenced
* @ex_rslt Errors array populated with all error codes and messages
**/
PROCEDURE INIT_ERRORS;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
* @name GET_DEFAULT_ENV
* @desc It returns string with name of default environment.
*       Return string is A_FILE_MANAGER_CONFIG.ENVIRONMENT_ID value.
* @example select ENV_MANAGER.GET_DEFAULT_ENV() from dual;
* @ex_rslt dev
**/
FUNCTION GET_DEFAULT_ENV
RETURN VARCHAR2;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
* @name INIT_VARIABLES
* @desc For specified pEnv parameter (A_FILE_MANAGER_CONFIG.ENVIRONMENT_ID)
*       assigns values to the following global package variables:
*       - gvNameSpace
*       - gvRegion
*       - gvCredentialName
*       - gvInboxBucketName
*       - gvDataBucketName
*       - gvArchiveBucketName
*       - gvInboxBucketUri
*       - gvDataBucketUri
*       - gvArchiveBucketUri
*       - gvLoggingEnabled
*       - gvMinLogLevel
*       - gvDefaultDateFormat
*       - gvConsoleLoggingEnabled
**/
PROCEDURE INIT_VARIABLES(
    pEnv VARCHAR2   -- environment id, e.g. 'dev'
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
* @name GET_ERROR_MESSAGE
* @desc It returns string with error message for specified pCode (Error_Code).
*       Error message is taken from Errors Array loaded by INIT_ERRORS procedure
* @example select ENV_MANAGER.GET_ERROR_MESSAGE(pCode => -20009) from dual;
* @ex_rslt File not found on the cloud
**/
FUNCTION GET_ERROR_MESSAGE(
    pCode PLS_INTEGER
) RETURN VARCHAR2;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
* @name GET_ERROR_STACK
* @desc It returns string with all possible error stack info.
*       Error message is taken from Errors Array loaded by INIT_ERRORS procedure
* @example
*  select ENV_MANAGER.GET_ERROR_STACK(
*            pFormat => 'OUTPUT'
*           ,pCode => -20009
*           ,pSourceFileReceivedKey => NULL)
*  from dual
* @ex_rslt
* ------------------------------------------------------+
* Error Message:
* ORA-0000: normal, successful completion
* -------------------------------------------------------
* Error Stack:
* -------------------------------------------------------
* Error Backtrace:
* ------------------------------------------------------+
**/
FUNCTION GET_ERROR_STACK(
     pFormat                VARCHAR2
    ,pCode                  PLS_INTEGER
    ,pSourceFileReceivedKey CT_MRDS.A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY%TYPE DEFAULT NULL
) RETURN VARCHAR2;
|
||||||
|
|
||||||
|
/**
* @name FORMAT_PARAMETERS
* @desc Formats parameter list for logging purposes.
*       Converts SYS.ODCIVARCHAR2LIST to formatted string with proper NULL handling.
* @example select ENV_MANAGER.FORMAT_PARAMETERS(SYS.ODCIVARCHAR2LIST('param1=value1', 'param2=NULL')) from dual;
* @ex_rslt param1=value1 ,
*          param2=NULL
**/
FUNCTION FORMAT_PARAMETERS(
    pParameterList SYS.ODCIVARCHAR2LIST
) RETURN VARCHAR2;
|
||||||
|
|
||||||
|
/**
* @name ANALYZE_VALIDATION_ERRORS
* @desc Analyzes CSV validation errors and generates detailed diagnostic report.
*       Compares CSV structure with template table and provides specific error analysis.
*       Includes suggested solutions for common validation issues.
* @param pValidationLogTable - Name of validation log table (e.g., VALIDATE$242_LOG)
* @param pTemplateSchema - Schema of template table (e.g., CT_ET_TEMPLATES)
* @param pTemplateTable - Name of template table (e.g., MOCK_PROC_TABLE)
* @param pCsvFileUri - URI of CSV file being validated
* @example SELECT ENV_MANAGER.ANALYZE_VALIDATION_ERRORS('VALIDATE$242_LOG', 'CT_ET_TEMPLATES', 'MOCK_PROC_TABLE', 'https://...') FROM DUAL;
* @ex_rslt Detailed validation analysis report with column mismatches and solutions
**/
FUNCTION ANALYZE_VALIDATION_ERRORS(
    pValidationLogTable VARCHAR2,
    pTemplateSchema     VARCHAR2,
    pTemplateTable      VARCHAR2,
    pCsvFileUri         VARCHAR2
) RETURN VARCHAR2;
|
||||||
|
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
-- PACKAGE VERSION MANAGEMENT FUNCTIONS
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/**
* @name GET_VERSION
* @desc Returns the current version number of the ENV_MANAGER package.
*       Uses semantic versioning format (MAJOR.MINOR.PATCH).
* @example SELECT ENV_MANAGER.GET_VERSION() FROM DUAL;
* @ex_rslt 3.0.0
**/
FUNCTION GET_VERSION RETURN VARCHAR2;
|
||||||
|
|
||||||
|
/**
* @name GET_BUILD_INFO
* @desc Returns comprehensive build information including version, build date, and author.
*       Formatted for display in logs or monitoring systems.
* @example SELECT ENV_MANAGER.GET_BUILD_INFO() FROM DUAL;
* @ex_rslt Package: ENV_MANAGER
*          Version: 3.0.0
*          Build Date: 2025-10-22 16:00:00
*          Author: Grzegorz Michalski
**/
FUNCTION GET_BUILD_INFO RETURN VARCHAR2;
|
||||||
|
|
||||||
|
/**
* @name GET_VERSION_HISTORY
* @desc Returns complete version history with all releases and changes.
*       Shows evolution of package features over time.
* @example SELECT ENV_MANAGER.GET_VERSION_HISTORY() FROM DUAL;
* @ex_rslt ENV_MANAGER Version History:
*          3.0.0 (2025-10-22): Added package versioning system...
*          2.1.0 (2025-10-15): Added ANALYZE_VALIDATION_ERRORS function...
**/
FUNCTION GET_VERSION_HISTORY RETURN VARCHAR2;
|
||||||
|
|
||||||
|
/**
* @name GET_PACKAGE_VERSION_INFO
* @desc Universal function to get formatted version information for any package.
*       This centralized function is used by all packages in the system.
* @param pPackageName - Name of the package
* @param pVersion - Version string (MAJOR.MINOR.PATCH format)
* @param pBuildDate - Build date timestamp
* @param pAuthor - Package author name
* @example SELECT ENV_MANAGER.GET_PACKAGE_VERSION_INFO('FILE_MANAGER', '2.1.0', '2025-10-22 15:00:00', 'Grzegorz Michalski') FROM DUAL;
* @ex_rslt Package: FILE_MANAGER
*          Version: 2.1.0
*          Build Date: 2025-10-22 15:00:00
*          Author: Grzegorz Michalski
**/
FUNCTION GET_PACKAGE_VERSION_INFO(
    pPackageName VARCHAR2,
    pVersion     VARCHAR2,
    pBuildDate   VARCHAR2,
    pAuthor      VARCHAR2
) RETURN VARCHAR2;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name FORMAT_VERSION_HISTORY
|
||||||
|
* @desc Universal function to format version history for any package.
|
||||||
|
* Adds package name header and proper formatting.
|
||||||
|
* @param pPackageName - Name of the package
|
||||||
|
* @param pVersionHistory - Complete version history text
|
||||||
|
* @example SELECT ENV_MANAGER.FORMAT_VERSION_HISTORY('FILE_MANAGER', '2.1.0 (2025-10-22): Export procedures...') FROM DUAL;
|
||||||
|
* @ex_rslt FILE_MANAGER Version History:
|
||||||
|
* 2.1.0 (2025-10-22): Export procedures...
|
||||||
|
**/
|
||||||
|
FUNCTION FORMAT_VERSION_HISTORY(
|
||||||
|
pPackageName VARCHAR2,
|
||||||
|
pVersionHistory VARCHAR2
|
||||||
|
) RETURN VARCHAR2;
|
||||||
|
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
-- PACKAGE HASH + CHANGE DETECTION FUNCTIONS
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name CALCULATE_PACKAGE_HASH
|
||||||
|
* @desc Calculates SHA256 hash of package source code from ALL_SOURCE.
|
||||||
|
* Returns hash for both SPEC and BODY (if exists).
|
||||||
|
* Used for automatic change detection.
|
||||||
|
* @param pPackageOwner - Schema owner of the package
|
||||||
|
* @param pPackageName - Name of the package
|
||||||
|
* @param pPackageType - Type of package code ('PACKAGE' for SPEC, 'PACKAGE BODY' for BODY)
|
||||||
|
* @example SELECT ENV_MANAGER.CALCULATE_PACKAGE_HASH('CT_MRDS', 'FILE_MANAGER', 'PACKAGE') FROM DUAL;
|
||||||
|
* @ex_rslt A7B3C5D9E8F1234567890ABCDEF... (64-character SHA256 hash)
|
||||||
|
**/
|
||||||
|
FUNCTION CALCULATE_PACKAGE_HASH(
|
||||||
|
pPackageOwner VARCHAR2,
|
||||||
|
pPackageName VARCHAR2,
|
||||||
|
pPackageType VARCHAR2 -- 'PACKAGE' or 'PACKAGE BODY'
|
||||||
|
) RETURN VARCHAR2;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name TRACK_PACKAGE_VERSION
|
||||||
|
* @desc Records package version and source code hash in A_PACKAGE_VERSION_TRACKING table.
|
||||||
|
* Automatically detects if source code changed without version update.
|
||||||
|
* Should be called after every package deployment.
|
||||||
|
* @param pPackageOwner - Schema owner of the package
|
||||||
|
* @param pPackageName - Name of the package
|
||||||
|
* @param pPackageVersion - Current version from PACKAGE_VERSION constant
|
||||||
|
* @param pPackageBuildDate - Build date from PACKAGE_BUILD_DATE constant
|
||||||
|
* @param pPackageAuthor - Author from PACKAGE_AUTHOR constant
|
||||||
|
* @example EXEC ENV_MANAGER.TRACK_PACKAGE_VERSION('CT_MRDS', 'FILE_MANAGER', '3.2.0', '2025-10-22 16:30:00', 'Grzegorz Michalski');
|
||||||
|
* @ex_rslt Record inserted into A_PACKAGE_VERSION_TRACKING with change detection status
|
||||||
|
**/
|
||||||
|
PROCEDURE TRACK_PACKAGE_VERSION(
|
||||||
|
pPackageOwner VARCHAR2,
|
||||||
|
pPackageName VARCHAR2,
|
||||||
|
pPackageVersion VARCHAR2,
|
||||||
|
pPackageBuildDate VARCHAR2,
|
||||||
|
pPackageAuthor VARCHAR2
|
||||||
|
);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name CHECK_PACKAGE_CHANGES
|
||||||
|
* @desc Checks if package source code has changed since last tracking.
|
||||||
|
* Compares current hash with last recorded hash in A_PACKAGE_VERSION_TRACKING.
|
||||||
|
* Returns detailed change detection report.
|
||||||
|
* @param pPackageOwner - Schema owner of the package
|
||||||
|
* @param pPackageName - Name of the package
|
||||||
|
* @example SELECT ENV_MANAGER.CHECK_PACKAGE_CHANGES('CT_MRDS', 'FILE_MANAGER') FROM DUAL;
|
||||||
|
* @ex_rslt WARNING: Package changed without version update!
|
||||||
|
* Last Version: 3.2.0
|
||||||
|
* Current Hash (SPEC): A7B3C5D9...
|
||||||
|
* Last Hash (SPEC): B8C4D6E0...
|
||||||
|
* RECOMMENDATION: Update PACKAGE_VERSION and PACKAGE_BUILD_DATE
|
||||||
|
**/
|
||||||
|
FUNCTION CHECK_PACKAGE_CHANGES(
|
||||||
|
pPackageOwner VARCHAR2,
|
||||||
|
pPackageName VARCHAR2
|
||||||
|
) RETURN VARCHAR2;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_PACKAGE_HASH_INFO
|
||||||
|
* @desc Returns formatted information about package hash and tracking history.
|
||||||
|
* Includes current hash, last tracked hash, and change detection status.
|
||||||
|
* @param pPackageOwner - Schema owner of the package
|
||||||
|
* @param pPackageName - Name of the package
|
||||||
|
* @example SELECT ENV_MANAGER.GET_PACKAGE_HASH_INFO('CT_MRDS', 'FILE_MANAGER') FROM DUAL;
|
||||||
|
* @ex_rslt Package: CT_MRDS.FILE_MANAGER
|
||||||
|
* Current Version: 3.2.0
|
||||||
|
* Current Hash (SPEC): A7B3C5D9...
|
||||||
|
* Last Tracked: 2025-10-22 16:30:00
|
||||||
|
* Status: OK - No changes detected
|
||||||
|
**/
|
||||||
|
FUNCTION GET_PACKAGE_HASH_INFO(
|
||||||
|
pPackageOwner VARCHAR2,
|
||||||
|
pPackageName VARCHAR2
|
||||||
|
) RETURN VARCHAR2;
|
||||||
|
|
||||||
|
END ENV_MANAGER;
|
||||||
|
/
|
||||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,233 @@
|
|||||||
|
create or replace PACKAGE CT_MRDS.DATA_EXPORTER
|
||||||
|
AUTHID CURRENT_USER
|
||||||
|
AS
|
||||||
|
/**
|
||||||
|
* Data Export Package: Provides comprehensive data export capabilities to various formats (CSV, Parquet)
|
||||||
|
* with support for cloud storage integration via Oracle Cloud Infrastructure (OCI).
|
||||||
|
* The structure of comment is used by GET_PACKAGE_DOCUMENTATION function
|
||||||
|
* which returns documentation text for confluence page (to Copy-Paste it).
|
||||||
|
**/
|
||||||
|
|
||||||
|
-- Package Version Information
|
||||||
|
PACKAGE_VERSION CONSTANT VARCHAR2(10) := '2.7.5';
|
||||||
|
PACKAGE_BUILD_DATE CONSTANT VARCHAR2(20) := '2026-02-11 12:15:00';
|
||||||
|
PACKAGE_AUTHOR CONSTANT VARCHAR2(100) := 'Grzegorz Michalski';
|
||||||
|
|
||||||
|
-- Version History (last 3-5 changes)
|
||||||
|
VERSION_HISTORY CONSTANT VARCHAR2(4000) :=
|
||||||
|
'v2.7.5 (2026-02-11): Added pRegisterExport parameter to EXPORT_TABLE_DATA procedure. When TRUE, registers each exported CSV file in A_SOURCE_FILE_RECEIVED.' || CHR(10) ||
|
||||||
|
'v2.7.4 (2026-02-11): ACTUAL FILENAME STORAGE - Store real filename with Oracle suffix in SOURCE_FILE_NAME instead of theoretical filename.' || CHR(10) ||
|
||||||
|
'v2.7.3 (2026-02-11): FIX LIKE pattern for DBMS_CLOUD.LIST_OBJECTS - Removed .csv extension from filename before pattern matching.' || CHR(10) ||
|
||||||
|
'v2.7.2 (2026-02-11): FIX pRegisterExport in EXPORT_TABLE_DATA_TO_CSV_BY_DATE - Added missing pRegisterExport parameter to EXPORT_SINGLE_PARTITION call.' || CHR(10) ||
|
||||||
|
'v2.7.1 (2026-02-11): AUTO-LOOKUP A_SOURCE_FILE_CONFIG_KEY - Parse pFolderName to automatically find config key from A_SOURCE_FILE_CONFIG.' || CHR(10) ||
|
||||||
|
'v2.7.0 (2026-02-10): Added pRegisterExport parameter to EXPORT_TABLE_DATA_TO_CSV_BY_DATE. When TRUE, registers each exported CSV file in A_SOURCE_FILE_RECEIVED.' || CHR(10) ||
|
||||||
|
'v2.6.3 (2026-01-28): COMPILATION FIX - Resolved ORA-00904 error in EXPORT_PARTITION_PARALLEL. SQLERRM properly assigned to vgMsgTmp variable.' || CHR(10) ||
|
||||||
|
'v2.6.2 (2026-01-28): CRITICAL FIX - Race condition when multiple exports run simultaneously. Session-safe cleanup with TASK_NAME filtering.' || CHR(10) ||
|
||||||
|
'v2.6.0 (2026-01-28): CRITICAL FIX - Added STATUS tracking to A_PARALLEL_EXPORT_CHUNKS table to prevent data duplication on retry.' || CHR(10);
|
||||||
|
|
||||||
|
cgBL CONSTANT VARCHAR2(2) := CHR(13)||CHR(10);
|
||||||
|
vgMsgTmp VARCHAR2(32000);
|
||||||
|
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
-- TYPE DEFINITIONS FOR PARTITION HANDLING
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Record type for year/month partition information
|
||||||
|
**/
|
||||||
|
TYPE partition_rec IS RECORD (
|
||||||
|
year VARCHAR2(4),
|
||||||
|
month VARCHAR2(2)
|
||||||
|
);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Table type for collection of partition records
|
||||||
|
**/
|
||||||
|
TYPE partition_tab IS TABLE OF partition_rec;
|
||||||
|
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
-- INTERNAL PARALLEL PROCESSING CALLBACK
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name EXPORT_PARTITION_PARALLEL
|
||||||
|
* @desc Internal callback procedure for DBMS_PARALLEL_EXECUTE.
|
||||||
|
* Processes single partition (year/month) chunk in parallel task.
|
||||||
|
* Called by DBMS_PARALLEL_EXECUTE framework for each chunk.
|
||||||
|
* This procedure is PUBLIC because DBMS_PARALLEL_EXECUTE requires it,
|
||||||
|
* but should NOT be called directly by external code.
|
||||||
|
* @param pStartId - Chunk start ID (CHUNK_ID from A_PARALLEL_EXPORT_CHUNKS table)
|
||||||
|
* @param pEndId - Chunk end ID (same as pStartId for single-row chunks)
|
||||||
|
**/
|
||||||
|
PROCEDURE EXPORT_PARTITION_PARALLEL (
|
||||||
|
pStartId IN NUMBER,
|
||||||
|
pEndId IN NUMBER
|
||||||
|
);
|
||||||
|
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
-- MAIN EXPORT PROCEDURES
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name EXPORT_TABLE_DATA
|
||||||
|
* @desc Wrapper procedure for DBMS_CLOUD.EXPORT_DATA.
|
||||||
|
* Exports data into CSV file on OCI infrustructure.
|
||||||
|
* pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
|
||||||
|
* Supports template table for column order and per-column date formatting.
|
||||||
|
* When pRegisterExport=TRUE, successfully exported files are registered in:
|
||||||
|
* - CT_MRDS.A_SOURCE_FILE_RECEIVED (tracks file location, size, checksum, and metadata)
|
||||||
|
* @param pTemplateTableName - Optional template table (SCHEMA.TABLE or TABLE) for:
|
||||||
|
* - Column order control (template defines CSV structure)
|
||||||
|
* - Per-column date formatting via FILE_MANAGER.GET_DATE_FORMAT
|
||||||
|
* - NULL = use source table columns in natural order
|
||||||
|
* @param pRegisterExport - When TRUE, registers each exported CSV file in A_SOURCE_FILE_RECEIVED table
|
||||||
|
* @example
|
||||||
|
* begin
|
||||||
|
* DATA_EXPORTER.EXPORT_TABLE_DATA(
|
||||||
|
* pSchemaName => 'CT_MRDS',
|
||||||
|
* pTableName => 'MY_TABLE',
|
||||||
|
* pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
||||||
|
* pBucketArea => 'DATA',
|
||||||
|
* pFolderName => 'csv_exports',
|
||||||
|
* pTemplateTableName => 'CT_ET_TEMPLATES.MY_TEMPLATE', -- Optional
|
||||||
|
* pRegisterExport => TRUE -- Optional, default FALSE
|
||||||
|
* );
|
||||||
|
* end;
|
||||||
|
**/
|
||||||
|
PROCEDURE EXPORT_TABLE_DATA (
|
||||||
|
pSchemaName IN VARCHAR2,
|
||||||
|
pTableName IN VARCHAR2,
|
||||||
|
pKeyColumnName IN VARCHAR2,
|
||||||
|
pBucketArea IN VARCHAR2,
|
||||||
|
pFolderName IN VARCHAR2,
|
||||||
|
pTemplateTableName IN VARCHAR2 default NULL,
|
||||||
|
pRegisterExport IN BOOLEAN default FALSE,
|
||||||
|
pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name EXPORT_TABLE_DATA_BY_DATE
|
||||||
|
* @desc Wrapper procedure for DBMS_CLOUD.EXPORT_DATA.
|
||||||
|
* Exports data into PARQUET files on OCI infrustructure.
|
||||||
|
* Each YEAR_MONTH pair goes to seperate file (implicit partitioning).
|
||||||
|
* Allows specifying custom column list or uses T.* if pColumnList is NULL.
|
||||||
|
* Validates that all columns in pColumnList exist in the target table.
|
||||||
|
* Automatically adds 'T.' prefix to column names in pColumnList.
|
||||||
|
* Supports parallel partition processing via pParallelDegree parameter (default 1, range 1-16).
|
||||||
|
* pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
|
||||||
|
* @example
|
||||||
|
* begin
|
||||||
|
* DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
|
||||||
|
* pSchemaName => 'CT_MRDS',
|
||||||
|
* pTableName => 'MY_TABLE',
|
||||||
|
* pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
||||||
|
* pBucketArea => 'DATA',
|
||||||
|
* pFolderName => 'parquet_exports',
|
||||||
|
* pColumnList => 'COLUMN1, COLUMN2, COLUMN3', -- Optional
|
||||||
|
* pMinDate => DATE '2024-01-01',
|
||||||
|
* pMaxDate => SYSDATE,
|
||||||
|
* pParallelDegree => 8 -- Optional, default 1, range 1-16
|
||||||
|
* );
|
||||||
|
* end;
|
||||||
|
**/
|
||||||
|
PROCEDURE EXPORT_TABLE_DATA_BY_DATE (
|
||||||
|
pSchemaName IN VARCHAR2,
|
||||||
|
pTableName IN VARCHAR2,
|
||||||
|
pKeyColumnName IN VARCHAR2,
|
||||||
|
pBucketArea IN VARCHAR2,
|
||||||
|
pFolderName IN VARCHAR2,
|
||||||
|
pColumnList IN VARCHAR2 default NULL,
|
||||||
|
pMinDate IN DATE default DATE '1900-01-01',
|
||||||
|
pMaxDate IN DATE default SYSDATE,
|
||||||
|
pParallelDegree IN NUMBER default 1,
|
||||||
|
pTemplateTableName IN VARCHAR2 default NULL,
|
||||||
|
pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name EXPORT_TABLE_DATA_TO_CSV_BY_DATE
|
||||||
|
* @desc Exports data to separate CSV files partitioned by year and month.
|
||||||
|
* Creates one CSV file for each year/month combination found in the data.
|
||||||
|
* Uses the same date filtering mechanism with CT_ODS.A_LOAD_HISTORY as EXPORT_TABLE_DATA_BY_DATE,
|
||||||
|
* but exports to CSV format instead of Parquet.
|
||||||
|
* Supports parallel partition processing via pParallelDegree parameter (1-16).
|
||||||
|
* File naming pattern: {pFileName}_YYYYMM.csv or {TABLENAME}_YYYYMM.csv (if pFileName is NULL)
|
||||||
|
* When pRegisterExport=TRUE, successfully exported files are registered in:
|
||||||
|
* - CT_MRDS.A_SOURCE_FILE_RECEIVED (tracks file location, size, checksum, and metadata)
|
||||||
|
* @example
|
||||||
|
* begin
|
||||||
|
* -- With custom filename
|
||||||
|
* DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
|
||||||
|
* pSchemaName => 'CT_MRDS',
|
||||||
|
* pTableName => 'MY_TABLE',
|
||||||
|
* pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
||||||
|
* pBucketArea => 'DATA',
|
||||||
|
* pFolderName => 'exports',
|
||||||
|
* pFileName => 'my_export.csv',
|
||||||
|
* pMinDate => DATE '2024-01-01',
|
||||||
|
* pMaxDate => SYSDATE,
|
||||||
|
* pParallelDegree => 8, -- Optional, default 1, range 1-16
|
||||||
|
* pRegisterExport => TRUE -- Optional, default FALSE, registers to A_SOURCE_FILE_RECEIVED
|
||||||
|
* );
|
||||||
|
*
|
||||||
|
* -- With auto-generated filename (based on table name only)
|
||||||
|
* DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
|
||||||
|
* pSchemaName => 'OU_TOP',
|
||||||
|
* pTableName => 'AGGREGATED_ALLOTMENT',
|
||||||
|
* pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
||||||
|
* pBucketArea => 'ARCHIVE',
|
||||||
|
* pFolderName => 'exports',
|
||||||
|
* pMinDate => DATE '2025-09-01',
|
||||||
|
* pMaxDate => DATE '2025-09-17',
|
||||||
|
* pRegisterExport => TRUE -- Registers each export to A_SOURCE_FILE_RECEIVED table
|
||||||
|
* );
|
||||||
|
* -- This will create files like: AGGREGATED_ALLOTMENT_202509.csv, etc.
|
||||||
|
* pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
|
||||||
|
* end;
|
||||||
|
**/
|
||||||
|
PROCEDURE EXPORT_TABLE_DATA_TO_CSV_BY_DATE (
|
||||||
|
pSchemaName IN VARCHAR2,
|
||||||
|
pTableName IN VARCHAR2,
|
||||||
|
pKeyColumnName IN VARCHAR2,
|
||||||
|
pBucketArea IN VARCHAR2,
|
||||||
|
pFolderName IN VARCHAR2,
|
||||||
|
pFileName IN VARCHAR2 DEFAULT NULL,
|
||||||
|
pColumnList IN VARCHAR2 default NULL,
|
||||||
|
pMinDate IN DATE default DATE '1900-01-01',
|
||||||
|
pMaxDate IN DATE default SYSDATE,
|
||||||
|
pParallelDegree IN NUMBER default 1,
|
||||||
|
pTemplateTableName IN VARCHAR2 default NULL,
|
||||||
|
pMaxFileSize IN NUMBER default 104857600,
|
||||||
|
pRegisterExport IN BOOLEAN default FALSE,
|
||||||
|
pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
|
||||||
|
);
|
||||||
|
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
-- VERSION MANAGEMENT FUNCTIONS
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns the current package version number
|
||||||
|
* return: Version string in format X.Y.Z (e.g., '2.1.0')
|
||||||
|
**/
|
||||||
|
FUNCTION GET_VERSION RETURN VARCHAR2;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns comprehensive build information including version, date, and author
|
||||||
|
* return: Formatted string with complete build details
|
||||||
|
**/
|
||||||
|
FUNCTION GET_BUILD_INFO RETURN VARCHAR2;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns the version history with recent changes
|
||||||
|
* return: Multi-line string with version history
|
||||||
|
**/
|
||||||
|
FUNCTION GET_VERSION_HISTORY RETURN VARCHAR2;
|
||||||
|
|
||||||
|
END;
|
||||||
|
|
||||||
|
/
|
||||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,625 @@
|
|||||||
|
create or replace PACKAGE CT_MRDS.ENV_MANAGER
AUTHID CURRENT_USER
AS
/**
 * General comment for package: Please put comments for functions and procedures as shown in below example.
 * It is a standard.
 * The structure of comment is used by GET_PACKAGE_DOCUMENTATION function
 * which returns documentation text for confluence page (to Copy-Paste it).
 **/

-- Example comment:
/**
 * @name EX_PROCEDURE_NAME
 * @desc Procedure description
 * @example select ENV_MANAGER.EX_PROCEDURE_NAME(pParameter => 129) from dual;
 * @ex_rslt Example Result
 **/

-- Package Version Information (Semantic Versioning: MAJOR.MINOR.PATCH)
PACKAGE_VERSION    CONSTANT VARCHAR2(10)  := '3.2.0';
PACKAGE_BUILD_DATE CONSTANT VARCHAR2(20)  := '2025-12-20 10:00:00';
PACKAGE_AUTHOR     CONSTANT VARCHAR2(100) := 'Grzegorz Michalski';

-- Version History (Latest changes first)
VERSION_HISTORY CONSTANT VARCHAR2(4000) :=
    '3.2.0 (2025-12-20): Added error codes for parallel execution support (CODE_INVALID_PARALLEL_DEGREE -20110, CODE_PARALLEL_EXECUTION_FAILED -20111)' || CHR(13)||CHR(10) ||
    '3.1.0 (2025-10-22): Added package hash tracking and automatic change detection system (SHA256 hashing)' || CHR(13)||CHR(10) ||
    '3.0.0 (2025-10-22): Added package versioning system with centralized version management functions' || CHR(13)||CHR(10) ||
    '2.1.0 (2025-10-15): Added ANALYZE_VALIDATION_ERRORS function for comprehensive CSV validation analysis' || CHR(13)||CHR(10) ||
    '2.0.0 (2025-10-01): Added LOG_PROCESS_ERROR procedure with enhanced error diagnostics and stack traces' || CHR(13)||CHR(10) ||
    '1.5.0 (2025-09-20): Added console logging support with gvConsoleLoggingEnabled configuration' || CHR(13)||CHR(10) ||
    '1.0.0 (2025-09-01): Initial release with error management and configuration system';
|
||||||
|
|
||||||
|
-- Holds a single captured error: Oracle error code plus message text
TYPE Error_Record IS RECORD (
    code    PLS_INTEGER,
    message VARCHAR2(4000)
);

-- Associative array of collected errors, keyed by integer index
TYPE tErrorList IS TABLE OF Error_Record INDEX BY PLS_INTEGER;

-- Package-level error accumulator
Errors tErrorList;

-- Session/run identification (populated at runtime)
guid       VARCHAR2(32);
gvEnv      VARCHAR2(200);
gvUsername VARCHAR2(128);
gvOsuser   VARCHAR2(128);
gvMachine  VARCHAR2(64);
gvModule   VARCHAR2(64);

-- OCI object-storage configuration (bucket names, URIs, credential)
gvNameSpace         VARCHAR2(200);
gvRegion            VARCHAR2(200);
gvDataBucketName    VARCHAR2(200);
gvInboxBucketName   VARCHAR2(200);
gvArchiveBucketName VARCHAR2(200);
gvDataBucketUri     VARCHAR2(200);
gvInboxBucketUri    VARCHAR2(200);
gvArchiveBucketUri  VARCHAR2(200);
gvCredentialName    VARCHAR2(200);

-- Overwritten by variable "LoggingEnabled" in A_FILE_MANAGER_CONFIG.CONFIG_VARIABLE table
gvLoggingEnabled VARCHAR2(3) := 'ON'; -- 'ON' or 'OFF'

-- Overwritten by variable "MinLogLevel" in A_FILE_MANAGER_CONFIG.CONFIG_VARIABLE table
-- Possible values: DEBUG ,INFO ,WARNING ,ERROR
gvMinLogLevel VARCHAR2(10) := 'DEBUG';

-- Overwritten by variable "DefaultDateFormat" in A_FILE_MANAGER_CONFIG.CONFIG_VARIABLE table
gvDefaultDateFormat VARCHAR2(200) := 'DD/MM/YYYY HH24:MI:SS';

-- Overwritten by variable "ConsoleLoggingEnabled" in A_FILE_MANAGER_CONFIG.CONFIG_VARIABLE table
gvConsoleLoggingEnabled VARCHAR2(3) := 'ON'; -- 'ON' or 'OFF'

-- CR+LF line break used when assembling multi-line messages
cgBL CONSTANT VARCHAR2(2) := CHR(13)||CHR(10);

-- Last resolved source-file configuration key
vgSourceFileConfigKey PLS_INTEGER;

-- Scratch buffer for building diagnostic/error messages
vgMsgTmp VARCHAR2(32000);
|
||||||
|
--Exceptions
-- Each error is declared as a triple: a named EXCEPTION, a CODE_* constant in the
-- user-defined error range (-20001 .. -20030), and a MSG_* default message text.
-- PRAGMA EXCEPTION_INIT binds the exception name to its error code so callers can
-- catch it by name while RAISE_APPLICATION_ERROR uses the numeric code.

-- -20001: neither file URI nor received-file key supplied
ERR_EMPTY_FILEURI_AND_RECKEY EXCEPTION;
CODE_EMPTY_FILEURI_AND_RECKEY CONSTANT PLS_INTEGER := -20001;
MSG_EMPTY_FILEURI_AND_RECKEY VARCHAR2(4000) := 'Either pFileUri or pSourceFileReceivedKey must be not null';
PRAGMA EXCEPTION_INIT( ERR_EMPTY_FILEURI_AND_RECKEY
                      ,CODE_EMPTY_FILEURI_AND_RECKEY);

-- -20002: no A_SOURCE_FILE_CONFIG row matches the given file URI
ERR_NO_CONFIG_MATCH_FOR_FILEURI EXCEPTION;
CODE_NO_CONFIG_MATCH_FOR_FILEURI CONSTANT PLS_INTEGER := -20002;
MSG_NO_CONFIG_MATCH_FOR_FILEURI VARCHAR2(4000) := 'No match for source file in A_SOURCE_FILE_CONFIG table'
    ||cgBL||' The file provided in parameter: pFileUri does not have '
    ||cgBL||' coresponding configuration in A_SOURCE_FILE_CONFIG table';
PRAGMA EXCEPTION_INIT( ERR_NO_CONFIG_MATCH_FOR_FILEURI
                      ,CODE_NO_CONFIG_MATCH_FOR_FILEURI);

-- -20003: more than one config row matches the source file
ERR_MULTIPLE_MATCH_FOR_SRCFILE EXCEPTION;
CODE_MULTIPLE_MATCH_FOR_SRCFILE CONSTANT PLS_INTEGER := -20003;
MSG_MULTIPLE_MATCH_FOR_SRCFILE VARCHAR2(4000) := 'Multiple match for source file in A_SOURCE_FILE_CONFIG table';
PRAGMA EXCEPTION_INIT( ERR_MULTIPLE_MATCH_FOR_SRCFILE
                      ,CODE_MULTIPLE_MATCH_FOR_SRCFILE);

-- -20004: DATE/TIMESTAMP column has no entry in A_COLUMN_DATE_FORMAT
ERR_MISSING_COLUMN_DATE_FORMAT EXCEPTION;
CODE_MISSING_COLUMN_DATE_FORMAT CONSTANT PLS_INTEGER := -20004;
MSG_MISSING_COLUMN_DATE_FORMAT VARCHAR2(4000) := 'Missing entry in config table: A_COLUMN_DATE_FORMAT primary key(TEMPLATE_TABLE_NAME, COLUMN_NAME)'
    ||cgBL||' Remember: each column which data_type IN (''DATE'', ''TIMESTAMP'')'
    ||cgBL||' should have DateFormat specified in A_COLUMN_DATE_FORMAT table '
    ||cgBL||' for example: ''YYYY-MM-DD''';
PRAGMA EXCEPTION_INIT( ERR_MISSING_COLUMN_DATE_FORMAT
                      ,CODE_MISSING_COLUMN_DATE_FORMAT);

-- -20005: duplicate date-format rows for one column
ERR_MULTIPLE_COLUMN_DATE_FORMAT EXCEPTION;
CODE_MULTIPLE_COLUMN_DATE_FORMAT CONSTANT PLS_INTEGER := -20005;
MSG_MULTIPLE_COLUMN_DATE_FORMAT VARCHAR2(4000) := 'Multiple records for date format in A_COLUMN_DATE_FORMAT table'
    ||cgBL||' There should be only one format specified for each DAT/TIMESTAMP column';
PRAGMA EXCEPTION_INIT( ERR_MULTIPLE_COLUMN_DATE_FORMAT
                      ,CODE_MULTIPLE_COLUMN_DATE_FORMAT);

-- -20006: external-table validation did not return a load operation id
ERR_DIDNT_GET_LOAD_OPERATION_ID EXCEPTION;
CODE_DIDNT_GET_LOAD_OPERATION_ID CONSTANT PLS_INTEGER := -20006;
MSG_DIDNT_GET_LOAD_OPERATION_ID VARCHAR2(4000) := 'Didnt get load operation id from external table validation';
PRAGMA EXCEPTION_INIT( ERR_DIDNT_GET_LOAD_OPERATION_ID
                      ,CODE_DIDNT_GET_LOAD_OPERATION_ID);

-- -20007: received file has no matching config / tracking row
ERR_NO_CONFIG_FOR_RECEIVED_FILE EXCEPTION;
CODE_NO_CONFIG_FOR_RECEIVED_FILE CONSTANT PLS_INTEGER := -20007;
MSG_NO_CONFIG_FOR_RECEIVED_FILE VARCHAR2(4000) := 'No match for received source file in A_SOURCE_FILE_CONFIG '
    ||cgBL||' or missing data in A_SOURCE_FILE_RECEIVED table for provided pSourceFileReceivedKey parameter';
PRAGMA EXCEPTION_INIT( ERR_NO_CONFIG_FOR_RECEIVED_FILE
                      ,CODE_NO_CONFIG_FOR_RECEIVED_FILE);

-- -20008: received file matches more than one config row
ERR_MULTI_CONFIG_FOR_RECEIVED_FILE EXCEPTION;
CODE_MULTI_CONFIG_FOR_RECEIVED_FILE CONSTANT PLS_INTEGER := -20008;
MSG_MULTI_CONFIG_FOR_RECEIVED_FILE VARCHAR2(4000) := 'Multiple matchs for received source file in A_SOURCE_FILE_CONFIG';
PRAGMA EXCEPTION_INIT( ERR_MULTI_CONFIG_FOR_RECEIVED_FILE
                      ,CODE_MULTI_CONFIG_FOR_RECEIVED_FILE);

-- -20009: object not present in cloud storage
ERR_FILE_NOT_FOUND_ON_CLOUD EXCEPTION;
CODE_FILE_NOT_FOUND_ON_CLOUD CONSTANT PLS_INTEGER := -20009;
MSG_FILE_NOT_FOUND_ON_CLOUD VARCHAR2(4000) := 'File not found on the cloud';
PRAGMA EXCEPTION_INIT( ERR_FILE_NOT_FOUND_ON_CLOUD
                      ,CODE_FILE_NOT_FOUND_ON_CLOUD);

-- -20010: file content failed validation
ERR_FILE_VALIDATION_FAILED EXCEPTION;
CODE_FILE_VALIDATION_FAILED CONSTANT PLS_INTEGER := -20010;
MSG_FILE_VALIDATION_FAILED VARCHAR2(4000) := 'File validation failed';
PRAGMA EXCEPTION_INIT( ERR_FILE_VALIDATION_FAILED
                      ,CODE_FILE_VALIDATION_FAILED);

-- -20011: CSV has more columns than the template permits
ERR_EXCESS_COLUMNS_DETECTED EXCEPTION;
CODE_EXCESS_COLUMNS_DETECTED CONSTANT PLS_INTEGER := -20011;
MSG_EXCESS_COLUMNS_DETECTED VARCHAR2(4000) := 'CSV file contains more columns than template allows';
PRAGMA EXCEPTION_INIT( ERR_EXCESS_COLUMNS_DETECTED
                      ,CODE_EXCESS_COLUMNS_DETECTED);

-- -20012: no config row for the supplied parameters
ERR_NO_CONFIG_MATCH EXCEPTION;
CODE_NO_CONFIG_MATCH CONSTANT PLS_INTEGER := -20012;
MSG_NO_CONFIG_MATCH VARCHAR2(4000) := 'No match for specified parameters in A_SOURCE_FILE_CONFIG table';
PRAGMA EXCEPTION_INIT( ERR_NO_CONFIG_MATCH
                      ,CODE_NO_CONFIG_MATCH);

-- -20013: unrecognized prefix value
ERR_UNKNOWN_PREFIX EXCEPTION;
CODE_UNKNOWN_PREFIX CONSTANT PLS_INTEGER := -20013;
MSG_UNKNOWN_PREFIX VARCHAR2(4000) := 'Unknown prefix';
PRAGMA EXCEPTION_INIT( ERR_UNKNOWN_PREFIX
                      ,CODE_UNKNOWN_PREFIX);

-- -20014: referenced table missing
ERR_TABLE_NOT_EXISTS EXCEPTION;
CODE_TABLE_NOT_EXISTS CONSTANT PLS_INTEGER := -20014;
MSG_TABLE_NOT_EXISTS VARCHAR2(4000) := 'Table does not exist';
PRAGMA EXCEPTION_INIT( ERR_TABLE_NOT_EXISTS
                      ,CODE_TABLE_NOT_EXISTS);

-- -20015: referenced column missing from table
ERR_COLUMN_NOT_EXISTS EXCEPTION;
CODE_COLUMN_NOT_EXISTS CONSTANT PLS_INTEGER := -20015;
MSG_COLUMN_NOT_EXISTS VARCHAR2(4000) := 'Column does not exist in table';
PRAGMA EXCEPTION_INIT( ERR_COLUMN_NOT_EXISTS
                      ,CODE_COLUMN_NOT_EXISTS);

-- -20016: column data type not handled
ERR_UNSUPPORTED_DATA_TYPE EXCEPTION;
CODE_UNSUPPORTED_DATA_TYPE CONSTANT PLS_INTEGER := -20016;
MSG_UNSUPPORTED_DATA_TYPE VARCHAR2(4000) := 'Unsupported data type';
PRAGMA EXCEPTION_INIT( ERR_UNSUPPORTED_DATA_TYPE
                      ,CODE_UNSUPPORTED_DATA_TYPE);

-- -20017: parent A_SOURCE row missing
ERR_MISSING_SOURCE_KEY EXCEPTION;
CODE_MISSING_SOURCE_KEY CONSTANT PLS_INTEGER := -20017;
MSG_MISSING_SOURCE_KEY VARCHAR2(4000) := 'The Source was not found in parent table A_SOURCE';
PRAGMA EXCEPTION_INIT( ERR_MISSING_SOURCE_KEY
                      ,CODE_MISSING_SOURCE_KEY);

-- -20018: no config row for the given A_SOURCE_FILE_CONFIG_KEY
ERR_NULL_SOURCE_FILE_CONFIG_KEY EXCEPTION;
CODE_NULL_SOURCE_FILE_CONFIG_KEY CONSTANT PLS_INTEGER := -20018;
MSG_NULL_SOURCE_FILE_CONFIG_KEY VARCHAR2(4000) := 'No entry in A_SOURCE_FILE_CONFIG table for specified A_SOURCE_FILE_CONFIG_KEY';
PRAGMA EXCEPTION_INIT( ERR_NULL_SOURCE_FILE_CONFIG_KEY
                      ,CODE_NULL_SOURCE_FILE_CONFIG_KEY);

-- -20019: duplicate A_SOURCE row
ERR_DUPLICATED_SOURCE_KEY EXCEPTION;
CODE_DUPLICATED_SOURCE_KEY CONSTANT PLS_INTEGER := -20019;
MSG_DUPLICATED_SOURCE_KEY VARCHAR2(4000) := 'The Source already exists in the A_SOURCE table';
PRAGMA EXCEPTION_INIT( ERR_DUPLICATED_SOURCE_KEY
                      ,CODE_DUPLICATED_SOURCE_KEY);

-- -20020: no CONTAINER-type config row for the given SOURCE_FILE_ID
ERR_MISSING_CONTAINER_CONFIG EXCEPTION;
CODE_MISSING_CONTAINER_CONFIG CONSTANT PLS_INTEGER := -20020;
MSG_MISSING_CONTAINER_CONFIG VARCHAR2(4000) := 'No match in A_SOURCE_FILE_CONFIG table where SOURCE_FILE_TYPE=''CONTAINER'' and specified SOURCE_FILE_ID';
PRAGMA EXCEPTION_INIT( ERR_MISSING_CONTAINER_CONFIG
                      ,CODE_MISSING_CONTAINER_CONFIG);

-- -20021: multiple CONTAINER-type config rows for the given SOURCE_FILE_ID
ERR_MULTIPLE_CONTAINER_ENTRIES EXCEPTION;
CODE_MULTIPLE_CONTAINER_ENTRIES CONSTANT PLS_INTEGER := -20021;
MSG_MULTIPLE_CONTAINER_ENTRIES VARCHAR2(4000) := 'Multiple matches in A_SOURCE_FILE_CONFIG table where SOURCE_FILE_TYPE=''CONTAINER'' and specified SOURCE_FILE_ID';
PRAGMA EXCEPTION_INIT( ERR_MULTIPLE_CONTAINER_ENTRIES
                      ,CODE_MULTIPLE_CONTAINER_ENTRIES);

-- -20022: invalid destination argument
ERR_WRONG_DESTINATION_PARAM EXCEPTION;
CODE_WRONG_DESTINATION_PARAM CONSTANT PLS_INTEGER := -20022;
MSG_WRONG_DESTINATION_PARAM VARCHAR2(4000) := 'Wrong destination parameter provided.';
PRAGMA EXCEPTION_INIT( ERR_WRONG_DESTINATION_PARAM
                      ,CODE_WRONG_DESTINATION_PARAM);

-- -20023: expected object absent in cloud storage
ERR_FILE_NOT_EXISTS_ON_CLOUD EXCEPTION;
CODE_FILE_NOT_EXISTS_ON_CLOUD CONSTANT PLS_INTEGER := -20023;
MSG_FILE_NOT_EXISTS_ON_CLOUD VARCHAR2(4000) := 'File not exists on cloud.';
PRAGMA EXCEPTION_INIT( ERR_FILE_NOT_EXISTS_ON_CLOUD
                      ,CODE_FILE_NOT_EXISTS_ON_CLOUD);

-- -20024: file already present in A_SOURCE_FILE_RECEIVED
ERR_FILE_ALREADY_REGISTERED EXCEPTION;
CODE_FILE_ALREADY_REGISTERED CONSTANT PLS_INTEGER := -20024;
MSG_FILE_ALREADY_REGISTERED VARCHAR2(4000) := 'File already registered in A_SOURCE_FILE_RECEIVED table.';
PRAGMA EXCEPTION_INIT( ERR_FILE_ALREADY_REGISTERED
                      ,CODE_FILE_ALREADY_REGISTERED);

-- -20025: malformed DATE/TIMESTAMP format mask
ERR_WRONG_DATE_TIMESTAMP_FORMAT EXCEPTION;
CODE_WRONG_DATE_TIMESTAMP_FORMAT CONSTANT PLS_INTEGER := -20025;
MSG_WRONG_DATE_TIMESTAMP_FORMAT VARCHAR2(4000) := 'Provided DATE or TIMESTAMP format has errors (possible duplicated codes, ex: ''DD'').';
PRAGMA EXCEPTION_INIT( ERR_WRONG_DATE_TIMESTAMP_FORMAT
                      ,CODE_WRONG_DATE_TIMESTAMP_FORMAT);

-- -20026: environment identifier not configured (session or config level)
ERR_ENVIRONMENT_NOT_SET EXCEPTION;
CODE_ENVIRONMENT_NOT_SET CONSTANT PLS_INTEGER := -20026;
MSG_ENVIRONMENT_NOT_SET VARCHAR2(4000) := 'EnvironmentID not set'
    ||cgBL||' Information about environment is needed to get proper configuration values.'
    ||cgBL||' It can be set up in two different ways:'
    ||cgBL||' 1. Set it on session level: execute DBMS_SESSION.SET_IDENTIFIER (client_id => ''dev'')'
    ||cgBL||' 2. Set it on configuration level: Insert into CT_MRDS.A_FILE_MANAGER_CONFIG (ENVIRONMENT_ID,CONFIG_VARIABLE,CONFIG_VARIABLE_VALUE) values (''default'',''environment_id'',''dev'')'
    ||cgBL||' Session level setup (1.) takes precedence over configuration level one (2.)'
    ;
PRAGMA EXCEPTION_INIT( ERR_ENVIRONMENT_NOT_SET
                      ,CODE_ENVIRONMENT_NOT_SET);

-- -20027: expected configuration variable missing
ERR_CONFIG_VARIABLE_NOT_SET EXCEPTION;
CODE_CONFIG_VARIABLE_NOT_SET CONSTANT PLS_INTEGER := -20027;
MSG_CONFIG_VARIABLE_NOT_SET VARCHAR2(4000) := 'Missing configuration value in A_FILE_MANAGER_CONFIG';
PRAGMA EXCEPTION_INIT( ERR_CONFIG_VARIABLE_NOT_SET
                      ,CODE_CONFIG_VARIABLE_NOT_SET);

-- -20028: archival attempted on a non-INPUT config entry
ERR_NOT_INPUT_SOURCE_FILE_TYPE EXCEPTION;
CODE_NOT_INPUT_SOURCE_FILE_TYPE CONSTANT PLS_INTEGER := -20028;
MSG_NOT_INPUT_SOURCE_FILE_TYPE VARCHAR2(4000) := 'Archival can be executed only for A_SOURCE_FILE_CONFIG_KEY where SOURCE_FILE_TYPE=''INPUT''';
PRAGMA EXCEPTION_INIT( ERR_NOT_INPUT_SOURCE_FILE_TYPE
                      ,CODE_NOT_INPUT_SOURCE_FILE_TYPE);

-- -20029: archival data export step failed
ERR_EXP_DATA_FOR_ARCH_FAILED EXCEPTION;
CODE_EXP_DATA_FOR_ARCH_FAILED CONSTANT PLS_INTEGER := -20029;
MSG_EXP_DATA_FOR_ARCH_FAILED VARCHAR2(4000) := 'Export data for archival failed.';
PRAGMA EXCEPTION_INIT( ERR_EXP_DATA_FOR_ARCH_FAILED
                      ,CODE_EXP_DATA_FOR_ARCH_FAILED);

-- -20030: rollback/restore of exported files failed during archival
ERR_RESTORE_FILE_FROM_TRASH EXCEPTION;
CODE_RESTORE_FILE_FROM_TRASH CONSTANT PLS_INTEGER := -20030;
MSG_RESTORE_FILE_FROM_TRASH VARCHAR2(4000) := 'Unexpected issues occured while archival process. Restoration of exported files failed.';
PRAGMA EXCEPTION_INIT( ERR_RESTORE_FILE_FROM_TRASH
                      ,CODE_RESTORE_FILE_FROM_TRASH);
|
||||||
|
|
||||||
|
ERR_CHANGE_STAT_TO_ARCHIVED_FAILED EXCEPTION;
|
||||||
|
CODE_CHANGE_STAT_TO_ARCHIVED_FAILED CONSTANT PLS_INTEGER := -20031;
|
||||||
|
MSG_CHANGE_STAT_TO_ARCHIVED_FAILED VARCHAR2(4000) := 'Failed to change file status to: ARCHIVED in A_SOURCE_FILE_RECEIVED table.';
|
||||||
|
PRAGMA EXCEPTION_INIT( ERR_CHANGE_STAT_TO_ARCHIVED_FAILED
|
||||||
|
,CODE_CHANGE_STAT_TO_ARCHIVED_FAILED);
|
||||||
|
|
||||||
|
ERR_MOVE_FILE_TO_TRASH_FAILED EXCEPTION;
|
||||||
|
CODE_MOVE_FILE_TO_TRASH_FAILED CONSTANT PLS_INTEGER := -20032;
|
||||||
|
MSG_MOVE_FILE_TO_TRASH_FAILED VARCHAR2(4000) := 'FAILED to move file to TRASH before DROPPING it.';
|
||||||
|
PRAGMA EXCEPTION_INIT( ERR_MOVE_FILE_TO_TRASH_FAILED
|
||||||
|
,CODE_MOVE_FILE_TO_TRASH_FAILED);
|
||||||
|
|
||||||
|
ERR_DROP_EXPORTED_FILES_FAILED EXCEPTION;
|
||||||
|
CODE_DROP_EXPORTED_FILES_FAILED CONSTANT PLS_INTEGER := -20033;
|
||||||
|
MSG_DROP_EXPORTED_FILES_FAILED VARCHAR2(4000) := 'FAILED to move file to TRASH before DROPPING it.';
|
||||||
|
PRAGMA EXCEPTION_INIT( ERR_DROP_EXPORTED_FILES_FAILED
|
||||||
|
,CODE_DROP_EXPORTED_FILES_FAILED);
|
||||||
|
|
||||||
|
ERR_INVALID_BUCKET_AREA EXCEPTION;
|
||||||
|
CODE_INVALID_BUCKET_AREA CONSTANT PLS_INTEGER := -20034;
|
||||||
|
MSG_INVALID_BUCKET_AREA VARCHAR2(4000) := 'Invalid bucket area specified. Valid values: INBOX, ODS, DATA, ARCHIVE';
|
||||||
|
PRAGMA EXCEPTION_INIT( ERR_INVALID_BUCKET_AREA
|
||||||
|
,CODE_INVALID_BUCKET_AREA);
|
||||||
|
|
||||||
|
ERR_INVALID_PARALLEL_DEGREE EXCEPTION;
|
||||||
|
CODE_INVALID_PARALLEL_DEGREE CONSTANT PLS_INTEGER := -20110;
|
||||||
|
MSG_INVALID_PARALLEL_DEGREE VARCHAR2(4000) := 'Invalid parallel degree parameter. Must be between 1 and 16';
|
||||||
|
PRAGMA EXCEPTION_INIT( ERR_INVALID_PARALLEL_DEGREE
|
||||||
|
,CODE_INVALID_PARALLEL_DEGREE);
|
||||||
|
|
||||||
|
ERR_PARALLEL_EXECUTION_FAILED EXCEPTION;
|
||||||
|
CODE_PARALLEL_EXECUTION_FAILED CONSTANT PLS_INTEGER := -20111;
|
||||||
|
MSG_PARALLEL_EXECUTION_FAILED VARCHAR2(4000) := 'Parallel execution failed';
|
||||||
|
PRAGMA EXCEPTION_INIT( ERR_PARALLEL_EXECUTION_FAILED
|
||||||
|
,CODE_PARALLEL_EXECUTION_FAILED);
|
||||||
|
|
||||||
|
ERR_UNKNOWN EXCEPTION;
|
||||||
|
CODE_UNKNOWN CONSTANT PLS_INTEGER := -20999;
|
||||||
|
MSG_UNKNOWN VARCHAR2(4000) := 'Unknown Error Occured';
|
||||||
|
PRAGMA EXCEPTION_INIT( ERR_UNKNOWN
|
||||||
|
,CODE_UNKNOWN);
|
||||||
|
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name LOG_PROCESS_EVENT
|
||||||
|
* @desc Insert a new log record into A_PROCESS_LOG table.
|
||||||
|
* Also outputs to console if gvConsoleLoggingEnabled = 'ON'.
|
||||||
|
* Respects logging level configuration (gvMinLogLevel).
|
||||||
|
* @example ENV_MANAGER.LOG_PROCESS_EVENT('Process completed successfully', 'INFO', 'pParam1=value1');
|
||||||
|
* @ex_rslt Record inserted into A_PROCESS_LOG table and optionally displayed in console output
|
||||||
|
**/
|
||||||
|
PROCEDURE LOG_PROCESS_EVENT (
|
||||||
|
pLogMessage VARCHAR2
|
||||||
|
,pLogLevel VARCHAR2 DEFAULT 'ERROR'
|
||||||
|
,pParameters VARCHAR2 DEFAULT NULL
|
||||||
|
,pProcessName VARCHAR2 DEFAULT 'FILE_MANAGER'
|
||||||
|
);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name LOG_PROCESS_ERROR
|
||||||
|
* @desc Insert a detailed error record into A_PROCESS_LOG table with full stack trace, backtrace, and call stack.
|
||||||
|
* This procedure captures comprehensive error information for debugging purposes while
|
||||||
|
* allowing clean user-facing error messages to be raised separately.
|
||||||
|
* @param pLogMessage - Base error message description
|
||||||
|
* @param pParameters - Procedure parameters for context
|
||||||
|
* @param pProcessName - Name of the calling process/package
|
||||||
|
* @ex_rslt Record inserted into A_PROCESS_LOG table with complete error stack information
|
||||||
|
*/
|
||||||
|
PROCEDURE LOG_PROCESS_ERROR (
|
||||||
|
pLogMessage VARCHAR2
|
||||||
|
,pParameters VARCHAR2 DEFAULT NULL
|
||||||
|
,pProcessName VARCHAR2 DEFAULT 'FILE_MANAGER'
|
||||||
|
);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name INIT_ERRORS
|
||||||
|
* @desc Loads data into Errors array.
|
||||||
|
* Errors array is a list of Record(Error_Code, Error_Message) index by Error_Code.
|
||||||
|
* Called automatically during package initialization.
|
||||||
|
* @example Called automatically when package is first referenced
|
||||||
|
* @ex_rslt Errors array populated with all error codes and messages
|
||||||
|
**/
|
||||||
|
PROCEDURE INIT_ERRORS;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_DEFAULT_ENV
|
||||||
|
* @desc It returns string with name of default environment.
|
||||||
|
* Return string is A_FILE_MANAGER_CONFIG.ENVIRONMENT_ID value.
|
||||||
|
* @example select ENV_MANAGER.GET_DEFAULT_ENV() from dual;
|
||||||
|
* @ex_rslt dev
|
||||||
|
**/
|
||||||
|
FUNCTION GET_DEFAULT_ENV
|
||||||
|
RETURN VARCHAR2;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name INIT_VARIABLES
|
||||||
|
* @desc For specified pEnv parameter (A_FILE_MANAGER_CONFIG.ENVIRONMENT_ID)
|
||||||
|
* Assign values to following global package variables:
|
||||||
|
* - gvNameSpace
|
||||||
|
* - gvRegion
|
||||||
|
* - gvCredentialName
|
||||||
|
* - gvInboxBucketName
|
||||||
|
* - gvDataBucketName
|
||||||
|
* - gvArchiveBucketName
|
||||||
|
* - gvInboxBucketUri
|
||||||
|
* - gvDataBucketUri
|
||||||
|
* - gvArchiveBucketUri
|
||||||
|
* - gvLoggingEnabled
|
||||||
|
* - gvMinLogLevel
|
||||||
|
* - gvDefaultDateFormat
|
||||||
|
* - gvConsoleLoggingEnabled
|
||||||
|
**/
|
||||||
|
PROCEDURE INIT_VARIABLES(
|
||||||
|
pEnv VARCHAR2
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_ERROR_MESSAGE
|
||||||
|
* @desc It returns string with error message for specified pCode (Error_Code).
|
||||||
|
* Error message is take from Errors Array loaded by INIT_ERRORS procedure
|
||||||
|
* @example select ENV_MANAGER.GET_ERROR_MESSAGE(pCode => -20009) from dual;
|
||||||
|
* @ex_rslt File not found on the cloud
|
||||||
|
**/
|
||||||
|
FUNCTION GET_ERROR_MESSAGE(
|
||||||
|
pCode PLS_INTEGER
|
||||||
|
) RETURN VARCHAR2;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_ERROR_STACK
|
||||||
|
* @desc It returns string with all possible error stack info.
|
||||||
|
* Error message is take from Errors Array loaded by INIT_ERRORS procedure
|
||||||
|
* @example
|
||||||
|
* select ENV_MANAGER.GET_ERROR_STACK(
|
||||||
|
* pFormat => 'OUTPUT'
|
||||||
|
* ,pCode => -20009
|
||||||
|
* ,pSourceFileReceivedKey => NULL)
|
||||||
|
* from dual
|
||||||
|
* @ex_rslt
|
||||||
|
* ------------------------------------------------------+
|
||||||
|
* Error Message:
|
||||||
|
* ORA-0000: normal, successful completion
|
||||||
|
* -------------------------------------------------------
|
||||||
|
* Error Stack:
|
||||||
|
* -------------------------------------------------------
|
||||||
|
* Error Backtrace:
|
||||||
|
* ------------------------------------------------------+
|
||||||
|
**/
|
||||||
|
FUNCTION GET_ERROR_STACK(
|
||||||
|
pFormat VARCHAR2
|
||||||
|
,pCode PLS_INTEGER
|
||||||
|
,pSourceFileReceivedKey CT_MRDS.A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY%TYPE DEFAULT NULL
|
||||||
|
) RETURN VARCHAR2;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name FORMAT_PARAMETERS
|
||||||
|
* @desc Formats parameter list for logging purposes.
|
||||||
|
* Converts SYS.ODCIVARCHAR2LIST to formatted string with proper NULL handling.
|
||||||
|
* @example select ENV_MANAGER.FORMAT_PARAMETERS(SYS.ODCIVARCHAR2LIST('param1=value1', 'param2=NULL')) from dual;
|
||||||
|
* @ex_rslt param1=value1 ,
|
||||||
|
* param2=NULL
|
||||||
|
**/
|
||||||
|
FUNCTION FORMAT_PARAMETERS(
|
||||||
|
pParameterList SYS.ODCIVARCHAR2LIST
|
||||||
|
) RETURN VARCHAR2;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name ANALYZE_VALIDATION_ERRORS
|
||||||
|
* @desc Analyzes CSV validation errors and generates detailed diagnostic report.
|
||||||
|
* Compares CSV structure with template table and provides specific error analysis.
|
||||||
|
* Includes suggested solutions for common validation issues.
|
||||||
|
* @param pValidationLogTable - Name of validation log table (e.g., VALIDATE$242_LOG)
|
||||||
|
* @param pTemplateSchema - Schema of template table (e.g., CT_ET_TEMPLATES)
|
||||||
|
* @param pTemplateTable - Name of template table (e.g., MOCK_PROC_TABLE)
|
||||||
|
* @param pCsvFileUri - URI of CSV file being validated
|
||||||
|
* @example SELECT ENV_MANAGER.ANALYZE_VALIDATION_ERRORS('VALIDATE$242_LOG', 'CT_ET_TEMPLATES', 'MOCK_PROC_TABLE', 'https://...') FROM DUAL;
|
||||||
|
* @ex_rslt Detailed validation analysis report with column mismatches and solutions
|
||||||
|
**/
|
||||||
|
FUNCTION ANALYZE_VALIDATION_ERRORS(
|
||||||
|
pValidationLogTable VARCHAR2,
|
||||||
|
pTemplateSchema VARCHAR2,
|
||||||
|
pTemplateTable VARCHAR2,
|
||||||
|
pCsvFileUri VARCHAR2
|
||||||
|
) RETURN VARCHAR2;
|
||||||
|
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
-- PACKAGE VERSION MANAGEMENT FUNCTIONS
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_VERSION
|
||||||
|
* @desc Returns the current version number of the ENV_MANAGER package.
|
||||||
|
* Uses semantic versioning format (MAJOR.MINOR.PATCH).
|
||||||
|
* @example SELECT ENV_MANAGER.GET_VERSION() FROM DUAL;
|
||||||
|
* @ex_rslt 3.0.0
|
||||||
|
**/
|
||||||
|
FUNCTION GET_VERSION RETURN VARCHAR2;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_BUILD_INFO
|
||||||
|
* @desc Returns comprehensive build information including version, build date, and author.
|
||||||
|
* Formatted for display in logs or monitoring systems.
|
||||||
|
* @example SELECT ENV_MANAGER.GET_BUILD_INFO() FROM DUAL;
|
||||||
|
* @ex_rslt Package: ENV_MANAGER
|
||||||
|
* Version: 3.0.0
|
||||||
|
* Build Date: 2025-10-22 16:00:00
|
||||||
|
* Author: Grzegorz Michalski
|
||||||
|
**/
|
||||||
|
FUNCTION GET_BUILD_INFO RETURN VARCHAR2;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_VERSION_HISTORY
|
||||||
|
* @desc Returns complete version history with all releases and changes.
|
||||||
|
* Shows evolution of package features over time.
|
||||||
|
* @example SELECT ENV_MANAGER.GET_VERSION_HISTORY() FROM DUAL;
|
||||||
|
* @ex_rslt ENV_MANAGER Version History:
|
||||||
|
* 3.0.0 (2025-10-22): Added package versioning system...
|
||||||
|
* 2.1.0 (2025-10-15): Added ANALYZE_VALIDATION_ERRORS function...
|
||||||
|
**/
|
||||||
|
FUNCTION GET_VERSION_HISTORY RETURN VARCHAR2;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_PACKAGE_VERSION_INFO
|
||||||
|
* @desc Universal function to get formatted version information for any package.
|
||||||
|
* This centralized function is used by all packages in the system.
|
||||||
|
* @param pPackageName - Name of the package
|
||||||
|
* @param pVersion - Version string (MAJOR.MINOR.PATCH format)
|
||||||
|
* @param pBuildDate - Build date timestamp
|
||||||
|
* @param pAuthor - Package author name
|
||||||
|
* @example SELECT ENV_MANAGER.GET_PACKAGE_VERSION_INFO('FILE_MANAGER', '2.1.0', '2025-10-22 15:00:00', 'Grzegorz Michalski') FROM DUAL;
|
||||||
|
* @ex_rslt Package: FILE_MANAGER
|
||||||
|
* Version: 2.1.0
|
||||||
|
* Build Date: 2025-10-22 15:00:00
|
||||||
|
* Author: Grzegorz Michalski
|
||||||
|
**/
|
||||||
|
FUNCTION GET_PACKAGE_VERSION_INFO(
|
||||||
|
pPackageName VARCHAR2,
|
||||||
|
pVersion VARCHAR2,
|
||||||
|
pBuildDate VARCHAR2,
|
||||||
|
pAuthor VARCHAR2
|
||||||
|
) RETURN VARCHAR2;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name FORMAT_VERSION_HISTORY
|
||||||
|
* @desc Universal function to format version history for any package.
|
||||||
|
* Adds package name header and proper formatting.
|
||||||
|
* @param pPackageName - Name of the package
|
||||||
|
* @param pVersionHistory - Complete version history text
|
||||||
|
* @example SELECT ENV_MANAGER.FORMAT_VERSION_HISTORY('FILE_MANAGER', '2.1.0 (2025-10-22): Export procedures...') FROM DUAL;
|
||||||
|
* @ex_rslt FILE_MANAGER Version History:
|
||||||
|
* 2.1.0 (2025-10-22): Export procedures...
|
||||||
|
**/
|
||||||
|
FUNCTION FORMAT_VERSION_HISTORY(
|
||||||
|
pPackageName VARCHAR2,
|
||||||
|
pVersionHistory VARCHAR2
|
||||||
|
) RETURN VARCHAR2;
|
||||||
|
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
-- PACKAGE HASH + CHANGE DETECTION FUNCTIONS
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name CALCULATE_PACKAGE_HASH
|
||||||
|
* @desc Calculates SHA256 hash of package source code from ALL_SOURCE.
|
||||||
|
* Returns hash for both SPEC and BODY (if exists).
|
||||||
|
* Used for automatic change detection.
|
||||||
|
* @param pPackageOwner - Schema owner of the package
|
||||||
|
* @param pPackageName - Name of the package
|
||||||
|
* @param pPackageType - Type of package code ('PACKAGE' for SPEC, 'PACKAGE BODY' for BODY)
|
||||||
|
* @example SELECT ENV_MANAGER.CALCULATE_PACKAGE_HASH('CT_MRDS', 'FILE_MANAGER', 'PACKAGE') FROM DUAL;
|
||||||
|
* @ex_rslt A7B3C5D9E8F1234567890ABCDEF... (64-character SHA256 hash)
|
||||||
|
**/
|
||||||
|
FUNCTION CALCULATE_PACKAGE_HASH(
|
||||||
|
pPackageOwner VARCHAR2,
|
||||||
|
pPackageName VARCHAR2,
|
||||||
|
pPackageType VARCHAR2 -- 'PACKAGE' or 'PACKAGE BODY'
|
||||||
|
) RETURN VARCHAR2;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name TRACK_PACKAGE_VERSION
|
||||||
|
* @desc Records package version and source code hash in A_PACKAGE_VERSION_TRACKING table.
|
||||||
|
* Automatically detects if source code changed without version update.
|
||||||
|
* Should be called after every package deployment.
|
||||||
|
* @param pPackageOwner - Schema owner of the package
|
||||||
|
* @param pPackageName - Name of the package
|
||||||
|
* @param pPackageVersion - Current version from PACKAGE_VERSION constant
|
||||||
|
* @param pPackageBuildDate - Build date from PACKAGE_BUILD_DATE constant
|
||||||
|
* @param pPackageAuthor - Author from PACKAGE_AUTHOR constant
|
||||||
|
* @example EXEC ENV_MANAGER.TRACK_PACKAGE_VERSION('CT_MRDS', 'FILE_MANAGER', '3.2.0', '2025-10-22 16:30:00', 'Grzegorz Michalski');
|
||||||
|
* @ex_rslt Record inserted into A_PACKAGE_VERSION_TRACKING with change detection status
|
||||||
|
**/
|
||||||
|
PROCEDURE TRACK_PACKAGE_VERSION(
|
||||||
|
pPackageOwner VARCHAR2,
|
||||||
|
pPackageName VARCHAR2,
|
||||||
|
pPackageVersion VARCHAR2,
|
||||||
|
pPackageBuildDate VARCHAR2,
|
||||||
|
pPackageAuthor VARCHAR2
|
||||||
|
);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name CHECK_PACKAGE_CHANGES
|
||||||
|
* @desc Checks if package source code has changed since last tracking.
|
||||||
|
* Compares current hash with last recorded hash in A_PACKAGE_VERSION_TRACKING.
|
||||||
|
* Returns detailed change detection report.
|
||||||
|
* @param pPackageOwner - Schema owner of the package
|
||||||
|
* @param pPackageName - Name of the package
|
||||||
|
* @example SELECT ENV_MANAGER.CHECK_PACKAGE_CHANGES('CT_MRDS', 'FILE_MANAGER') FROM DUAL;
|
||||||
|
* @ex_rslt WARNING: Package changed without version update!
|
||||||
|
* Last Version: 3.2.0
|
||||||
|
* Current Hash (SPEC): A7B3C5D9...
|
||||||
|
* Last Hash (SPEC): B8C4D6E0...
|
||||||
|
* RECOMMENDATION: Update PACKAGE_VERSION and PACKAGE_BUILD_DATE
|
||||||
|
**/
|
||||||
|
FUNCTION CHECK_PACKAGE_CHANGES(
|
||||||
|
pPackageOwner VARCHAR2,
|
||||||
|
pPackageName VARCHAR2
|
||||||
|
) RETURN VARCHAR2;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_PACKAGE_HASH_INFO
|
||||||
|
* @desc Returns formatted information about package hash and tracking history.
|
||||||
|
* Includes current hash, last tracked hash, and change detection status.
|
||||||
|
* @param pPackageOwner - Schema owner of the package
|
||||||
|
* @param pPackageName - Name of the package
|
||||||
|
* @example SELECT ENV_MANAGER.GET_PACKAGE_HASH_INFO('CT_MRDS', 'FILE_MANAGER') FROM DUAL;
|
||||||
|
* @ex_rslt Package: CT_MRDS.FILE_MANAGER
|
||||||
|
* Current Version: 3.2.0
|
||||||
|
* Current Hash (SPEC): A7B3C5D9...
|
||||||
|
* Last Tracked: 2025-10-22 16:30:00
|
||||||
|
* Status: OK - No changes detected
|
||||||
|
**/
|
||||||
|
FUNCTION GET_PACKAGE_HASH_INFO(
|
||||||
|
pPackageOwner VARCHAR2,
|
||||||
|
pPackageName VARCHAR2
|
||||||
|
) RETURN VARCHAR2;
|
||||||
|
|
||||||
|
END ENV_MANAGER;
|
||||||
|
/
|
||||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,227 @@
|
|||||||
|
create or replace PACKAGE CT_MRDS.DATA_EXPORTER
AUTHID CURRENT_USER
AS
/**
 * Data Export Package: exports table data to CSV and Parquet files on
 * Oracle Cloud Infrastructure (OCI) object storage via DBMS_CLOUD.
 * The structure of this comment is consumed by GET_PACKAGE_DOCUMENTATION,
 * which renders documentation text for the Confluence page (copy-paste).
 **/

   -- Package version information (Semantic Versioning: MAJOR.MINOR.PATCH)
   PACKAGE_VERSION      CONSTANT VARCHAR2(10)  := '2.8.0';
   PACKAGE_BUILD_DATE   CONSTANT VARCHAR2(20)  := '2026-02-10 11:00:00';
   PACKAGE_AUTHOR       CONSTANT VARCHAR2(100) := 'Grzegorz Michalski';

   -- CRLF line break used as separator in multi-line messages and history text
   cgBL CONSTANT VARCHAR2(2) := CHR(13)||CHR(10);

   -- Version history, newest entry first.
   -- NOTE(review): total text is close to the 4000-byte VARCHAR2 limit;
   -- trim old entries before adding new ones, or raising ORA-06502 at
   -- package initialization becomes a risk.
   VERSION_HISTORY CONSTANT VARCHAR2(4000) :=
      'v2.8.0 (2026-02-10): CRITICAL FIX - Removed duplicate post-export registration code that conflicted with per-partition registration. Post-export registration (SERVICE_NAME=DATA_EXPORTER, WORKFLOW_START=SYSTIMESTAMP) removed. Per-partition registration (SERVICE_NAME=CSV_EXPORT, WORKFLOW_START=partition_date) now executes exclusively. Prevents duplicate workflow records and ensures CSV files contain constant workflow keys instead of A_ETL_LOAD_SET_FK aliases.' || cgBL ||
      'v2.7.0 (2026-02-09): NEW FEATURE - Added pRegisterExport parameter to EXPORT_TABLE_DATA_TO_CSV_BY_DATE. When TRUE, successfully exported files are registered in A_WORKFLOW_HISTORY (one record per YEAR/MONTH) and A_SOURCE_FILE_RECEIVED tables for tracking and audit purposes.' || cgBL ||
      'v2.6.3 (2026-01-28): COMPILATION FIX - Resolved ORA-00904 error in EXPORT_PARTITION_PARALLEL. SQLERRM and DBMS_UTILITY.FORMAT_ERROR_BACKTRACE cannot be used directly in SQL UPDATE statements. Now properly assigned to vgMsgTmp variable before UPDATE.' || cgBL ||
      'v2.6.2 (2026-01-28): CRITICAL FIX - Race condition when multiple exports run simultaneously. Changed DELETE to filter by age (>24h) instead of deleting all COMPLETED chunks. Prevents concurrent sessions from deleting each other chunks. Session-safe cleanup with TASK_NAME filtering. Enables true parallel execution of multiple export jobs.' || cgBL ||
      'v2.6.1 (2026-01-28): Added DELETE_FAILED_EXPORT_FILE procedure to clean up partial/corrupted files before retry. When partition fails mid-export, partial file is deleted before retry to prevent Oracle from creating _1 suffixed duplicates. Ensures clean retry without orphaned files in OCI bucket.' || cgBL ||
      'v2.6.0 (2026-01-28): CRITICAL FIX - Added STATUS tracking to A_PARALLEL_EXPORT_CHUNKS table to prevent data duplication on retry. System now restarts ONLY failed partitions instead of re-exporting all data. Added ERROR_MESSAGE and EXPORT_TIMESTAMP columns for better error handling and monitoring. Prevents duplicate file creation when parallel tasks fail (e.g., 22 partitions with 16 threads, 3 failures no longer duplicates 19 successful exports).' || cgBL ||
      'v2.5.0 (2026-01-26): Added recorddelimiter parameter with CRLF (CHR(13)||CHR(10)) for CSV exports to ensure Windows-compatible line endings. Improves cross-platform compatibility when CSV files are opened in Windows applications (Notepad, Excel).' || cgBL ||
      'v2.4.0 (2026-01-11): Added pTemplateTableName parameter for per-column date format configuration. Implements dynamic query building with TO_CHAR for each date/timestamp column using FILE_MANAGER.GET_DATE_FORMAT. Supports 3-tier hierarchy: column-specific, template DEFAULT, global fallback. Eliminates single dateformat limitation of DBMS_CLOUD.EXPORT_DATA.' || cgBL ||
      'v2.3.0 (2025-12-20): Added parallel partition processing using DBMS_PARALLEL_EXECUTE. New pParallelDegree parameter (1-16, default 1) for EXPORT_TABLE_DATA_BY_DATE and EXPORT_TABLE_DATA_TO_CSV_BY_DATE procedures. Each year/month partition processed in separate thread for improved performance.' || cgBL ||
      'v2.2.0 (2025-12-19): DRY refactoring - extracted shared helper functions (sanitizeFilename, VALIDATE_TABLE_AND_COLUMNS, GET_PARTITIONS, EXPORT_SINGLE_PARTITION worker procedure). Reduced code duplication by ~400 lines. Prepared architecture for v2.3.0 parallel processing.' || cgBL ||
      'v2.1.1 (2025-12-04): Fixed JOIN column reference A_WORKFLOW_HISTORY_KEY -> A_ETL_LOAD_SET_KEY, added consistent column mapping and dynamic column list to EXPORT_TABLE_DATA procedure, enhanced DEBUG logging for all export operations' || cgBL ||
      'v2.1.0 (2025-10-22): Added version tracking and PARTITION_YEAR/PARTITION_MONTH support' || cgBL ||
      'v2.0.0 (2025-10-01): Separated export functionality from FILE_MANAGER package';

   -- Scratch buffer for error text (SQLERRM/backtrace must be assigned here
   -- before being referenced inside SQL UPDATE statements -- see v2.6.3 note).
   vgMsgTmp VARCHAR2(32000);

   ---------------------------------------------------------------------------------------------------------------------------
   -- TYPE DEFINITIONS FOR PARTITION HANDLING
   ---------------------------------------------------------------------------------------------------------------------------

   /**
    * Record describing a single year/month export partition.
    **/
   TYPE partition_rec IS RECORD (
      year  VARCHAR2(4),
      month VARCHAR2(2)
   );

   /**
    * Collection of year/month partition records.
    **/
   TYPE partition_tab IS TABLE OF partition_rec;

   ---------------------------------------------------------------------------------------------------------------------------
   -- INTERNAL PARALLEL PROCESSING CALLBACK
   ---------------------------------------------------------------------------------------------------------------------------

   /**
    * @name EXPORT_PARTITION_PARALLEL
    * @desc Internal callback for DBMS_PARALLEL_EXECUTE. Processes one
    *       year/month partition chunk per invocation. It must be PUBLIC
    *       because DBMS_PARALLEL_EXECUTE invokes it by name, but it is
    *       NOT intended to be called directly by external code.
    * @param pStartId - chunk start id (CHUNK_ID in A_PARALLEL_EXPORT_CHUNKS)
    * @param pEndId   - chunk end id (equal to pStartId for single-row chunks)
    **/
   PROCEDURE EXPORT_PARTITION_PARALLEL (
      pStartId IN NUMBER,
      pEndId   IN NUMBER
   );

   ---------------------------------------------------------------------------------------------------------------------------
   -- MAIN EXPORT PROCEDURES
   ---------------------------------------------------------------------------------------------------------------------------

   /**
    * @name EXPORT_TABLE_DATA
    * @desc Wrapper around DBMS_CLOUD.EXPORT_DATA. Exports table data to a
    *       CSV file on OCI infrastructure.
    *       pBucketArea accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
    * @example
    *   begin
    *     DATA_EXPORTER.EXPORT_TABLE_DATA(
    *       pSchemaName    => 'CT_MRDS',
    *       pTableName     => 'MY_TABLE',
    *       pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
    *       pBucketArea    => 'DATA',
    *       pFolderName    => 'csv_exports'
    *     );
    *   end;
    **/
   PROCEDURE EXPORT_TABLE_DATA (
      pSchemaName     IN VARCHAR2,
      pTableName      IN VARCHAR2,
      pKeyColumnName  IN VARCHAR2,
      pBucketArea     IN VARCHAR2,
      pFolderName     IN VARCHAR2,
      pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
   );

   /**
    * @name EXPORT_TABLE_DATA_BY_DATE
    * @desc Wrapper around DBMS_CLOUD.EXPORT_DATA. Exports data to PARQUET
    *       files on OCI infrastructure; each YEAR/MONTH pair goes to a
    *       separate file (implicit partitioning).
    *       Accepts an optional custom column list (pColumnList); uses T.*
    *       when NULL. Each listed column is validated against the target
    *       table and automatically prefixed with 'T.'.
    *       Partitions can be processed in parallel via pParallelDegree
    *       (default 1, range 1-16).
    *       pBucketArea accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
    * @example
    *   begin
    *     DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
    *       pSchemaName     => 'CT_MRDS',
    *       pTableName      => 'MY_TABLE',
    *       pKeyColumnName  => 'A_ETL_LOAD_SET_KEY_FK',
    *       pBucketArea     => 'DATA',
    *       pFolderName     => 'parquet_exports',
    *       pColumnList     => 'COLUMN1, COLUMN2, COLUMN3', -- Optional
    *       pMinDate        => DATE '2024-01-01',
    *       pMaxDate        => SYSDATE,
    *       pParallelDegree => 8 -- Optional, default 1, range 1-16
    *     );
    *   end;
    **/
   PROCEDURE EXPORT_TABLE_DATA_BY_DATE (
      pSchemaName        IN VARCHAR2,
      pTableName         IN VARCHAR2,
      pKeyColumnName     IN VARCHAR2,
      pBucketArea        IN VARCHAR2,
      pFolderName        IN VARCHAR2,
      pColumnList        IN VARCHAR2 default NULL,
      pMinDate           IN DATE     default DATE '1900-01-01',
      pMaxDate           IN DATE     default SYSDATE,
      pParallelDegree    IN NUMBER   default 1,
      pTemplateTableName IN VARCHAR2 default NULL,
      pCredentialName    IN VARCHAR2 default ENV_MANAGER.gvCredentialName
   );

   /**
    * @name EXPORT_TABLE_DATA_TO_CSV_BY_DATE
    * @desc Exports data to separate CSV files partitioned by year and month
    *       (one CSV file per year/month combination found in the data).
    *       Uses the same CT_ODS.A_LOAD_HISTORY date filtering mechanism as
    *       EXPORT_TABLE_DATA_BY_DATE, but writes CSV instead of Parquet.
    *       Supports parallel partition processing via pParallelDegree (1-16).
    *       File naming: {pFileName}_YYYYMM.csv, or {TABLENAME}_YYYYMM.csv
    *       when pFileName is NULL.
    *       When pRegisterExport=TRUE, successfully exported files are
    *       registered in:
    *         - CT_MRDS.A_WORKFLOW_HISTORY (one record per YEAR/MONTH with
    *           export timestamp)
    *         - CT_MRDS.A_SOURCE_FILE_RECEIVED (file location and partition
    *           info)
    *       pBucketArea accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
    * @example
    *   begin
    *     -- With custom filename
    *     DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
    *       pSchemaName     => 'CT_MRDS',
    *       pTableName      => 'MY_TABLE',
    *       pKeyColumnName  => 'A_ETL_LOAD_SET_KEY_FK',
    *       pBucketArea     => 'DATA',
    *       pFolderName     => 'exports',
    *       pFileName       => 'my_export.csv',
    *       pMinDate        => DATE '2024-01-01',
    *       pMaxDate        => SYSDATE,
    *       pParallelDegree => 8,    -- Optional, default 1, range 1-16
    *       pRegisterExport => TRUE  -- Optional, default FALSE
    *     );
    *
    *     -- With auto-generated filename (based on table name only):
    *     DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
    *       pSchemaName     => 'OU_TOP',
    *       pTableName      => 'AGGREGATED_ALLOTMENT',
    *       pKeyColumnName  => 'A_ETL_LOAD_SET_KEY_FK',
    *       pBucketArea     => 'ARCHIVE',
    *       pFolderName     => 'exports',
    *       pMinDate        => DATE '2025-09-01',
    *       pMaxDate        => DATE '2025-09-17',
    *       pRegisterExport => TRUE
    *     );
    *     -- Creates files like: AGGREGATED_ALLOTMENT_202509.csv, etc.
    *   end;
    **/
   PROCEDURE EXPORT_TABLE_DATA_TO_CSV_BY_DATE (
      pSchemaName        IN VARCHAR2,
      pTableName         IN VARCHAR2,
      pKeyColumnName     IN VARCHAR2,
      pBucketArea        IN VARCHAR2,
      pFolderName        IN VARCHAR2,
      pFileName          IN VARCHAR2 DEFAULT NULL,
      pColumnList        IN VARCHAR2 default NULL,
      pMinDate           IN DATE     default DATE '1900-01-01',
      pMaxDate           IN DATE     default SYSDATE,
      pParallelDegree    IN NUMBER   default 1,
      pTemplateTableName IN VARCHAR2 default NULL,
      pMaxFileSize       IN NUMBER   default 104857600,
      pRegisterExport    IN BOOLEAN  default FALSE,
      pCredentialName    IN VARCHAR2 default ENV_MANAGER.gvCredentialName
   );

   ---------------------------------------------------------------------------------------------------------------------------
   -- VERSION MANAGEMENT FUNCTIONS
   ---------------------------------------------------------------------------------------------------------------------------

   /**
    * Returns the current package version number.
    * return: version string in format X.Y.Z (e.g., '2.1.0')
    **/
   FUNCTION GET_VERSION RETURN VARCHAR2;

   /**
    * Returns comprehensive build information: version, build date, author.
    * return: formatted string with complete build details
    **/
   FUNCTION GET_BUILD_INFO RETURN VARCHAR2;

   /**
    * Returns the version history with recent changes.
    * return: multi-line string with version history
    **/
   FUNCTION GET_VERSION_HISTORY RETURN VARCHAR2;

END;

/
|
||||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,239 @@
|
|||||||
|
create or replace PACKAGE CT_MRDS.DATA_EXPORTER
AUTHID CURRENT_USER
AS
/**
 * Data Export Package: Provides comprehensive data export capabilities to various formats (CSV, Parquet)
 * with support for cloud storage integration via Oracle Cloud Infrastructure (OCI).
 * The structure of comment is used by GET_PACKAGE_DOCUMENTATION function
 * which returns documentation text for confluence page (to Copy-Paste it).
 **/

    -- Package Version Information
    PACKAGE_VERSION     CONSTANT VARCHAR2(10)  := '2.9.0';
    PACKAGE_BUILD_DATE  CONSTANT VARCHAR2(20)  := '2026-02-13 14:00:00';
    PACKAGE_AUTHOR      CONSTANT VARCHAR2(100) := 'Grzegorz Michalski';

    -- Version History (last 3-5 changes)
    VERSION_HISTORY CONSTANT VARCHAR2(4000) :=
        'v2.9.0 (2026-02-13): Added pProcessName parameter to EXPORT_TABLE_DATA and EXPORT_TABLE_DATA_TO_CSV_BY_DATE procedures for process tracking in A_SOURCE_FILE_RECEIVED table.' || CHR(10) ||
        'v2.8.1 (2026-02-12): FIX query in EXPORT_TABLE_DATA - removed A_LOAD_HISTORY join to ensure single file output (simple SELECT).' || CHR(10) ||
        'v2.8.0 (2026-02-12): MAJOR REFACTOR - EXPORT_TABLE_DATA now exports to single CSV file instead of partitioning by key values. Added pFileName parameter.' || CHR(10) ||
        'v2.7.5 (2026-02-11): Added pRegisterExport parameter to EXPORT_TABLE_DATA procedure. When TRUE, registers each exported CSV file in A_SOURCE_FILE_RECEIVED.' || CHR(10) ||
        'v2.7.4 (2026-02-11): ACTUAL FILENAME STORAGE - Store real filename with Oracle suffix in SOURCE_FILE_NAME instead of theoretical filename.' || CHR(10);

    -- CR+LF line break used when composing multi-line messages
    cgBL     CONSTANT VARCHAR2(2) := CHR(13)||CHR(10);
    -- Scratch buffer for building log / error message text
    vgMsgTmp VARCHAR2(32000);

    ---------------------------------------------------------------------------------------------------------------------------
    -- TYPE DEFINITIONS FOR PARTITION HANDLING
    ---------------------------------------------------------------------------------------------------------------------------

    /**
     * Record type for year/month partition information
     **/
    TYPE partition_rec IS RECORD (
        year  VARCHAR2(4),
        month VARCHAR2(2)
    );

    /**
     * Table type for collection of partition records
     **/
    TYPE partition_tab IS TABLE OF partition_rec;

    ---------------------------------------------------------------------------------------------------------------------------
    -- INTERNAL PARALLEL PROCESSING CALLBACK
    ---------------------------------------------------------------------------------------------------------------------------

    /**
     * @name EXPORT_PARTITION_PARALLEL
     * @desc Internal callback procedure for DBMS_PARALLEL_EXECUTE.
     *       Processes single partition (year/month) chunk in parallel task.
     *       Called by DBMS_PARALLEL_EXECUTE framework for each chunk.
     *       This procedure is PUBLIC because DBMS_PARALLEL_EXECUTE requires it,
     *       but should NOT be called directly by external code.
     * @param pStartId - Chunk start ID (CHUNK_ID from A_PARALLEL_EXPORT_CHUNKS table)
     * @param pEndId   - Chunk end ID (same as pStartId for single-row chunks)
     **/
    PROCEDURE EXPORT_PARTITION_PARALLEL (
        pStartId IN NUMBER,
        pEndId   IN NUMBER
    );

    ---------------------------------------------------------------------------------------------------------------------------
    -- MAIN EXPORT PROCEDURES
    ---------------------------------------------------------------------------------------------------------------------------

    /**
     * @name EXPORT_TABLE_DATA
     * @desc Wrapper procedure for DBMS_CLOUD.EXPORT_DATA.
     *       Exports data into single CSV file on OCI infrastructure.
     *       pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
     *       Supports template table for column order and per-column date formatting.
     *       When pRegisterExport=TRUE, successfully exported file is registered in:
     *       - CT_MRDS.A_SOURCE_FILE_RECEIVED (tracks file location, size, checksum, and metadata)
     * @param pFileName - Optional filename (e.g., 'export.csv'). NULL = auto-generate from table name
     * @param pTemplateTableName - Optional template table (SCHEMA.TABLE or TABLE) for:
     *        - Column order control (template defines CSV structure)
     *        - Per-column date formatting via FILE_MANAGER.GET_DATE_FORMAT
     *        - NULL = use source table columns in natural order
     * @param pMaxFileSize - Maximum file size in bytes (default 104857600 = 100MB, min 10MB, max 1GB)
     * @param pRegisterExport - When TRUE, registers exported CSV file in A_SOURCE_FILE_RECEIVED table
     * @param pProcessName - Process name stored in PROCESS_NAME column (default 'DATA_EXPORTER')
     * @example
     * begin
     *   DATA_EXPORTER.EXPORT_TABLE_DATA(
     *       pSchemaName        => 'CT_MRDS',
     *       pTableName         => 'MY_TABLE',
     *       pKeyColumnName     => 'A_ETL_LOAD_SET_KEY_FK',
     *       pBucketArea        => 'DATA',
     *       pFolderName        => 'csv_exports',
     *       pFileName          => 'my_export.csv',                -- Optional
     *       pTemplateTableName => 'CT_ET_TEMPLATES.MY_TEMPLATE',  -- Optional
     *       pMaxFileSize       => 104857600,                      -- Optional, default 100MB
     *       pRegisterExport    => TRUE                            -- Optional, default FALSE
     *   );
     * end;
     **/
    PROCEDURE EXPORT_TABLE_DATA (
        pSchemaName        IN VARCHAR2,
        pTableName         IN VARCHAR2,
        pKeyColumnName     IN VARCHAR2,
        pBucketArea        IN VARCHAR2,
        pFolderName        IN VARCHAR2,
        pFileName          IN VARCHAR2 default NULL,
        pTemplateTableName IN VARCHAR2 default NULL,
        pMaxFileSize       IN NUMBER   default 104857600,
        pRegisterExport    IN BOOLEAN  default FALSE,
        pProcessName       IN VARCHAR2 default 'DATA_EXPORTER',
        pCredentialName    IN VARCHAR2 default ENV_MANAGER.gvCredentialName
    );

    /**
     * @name EXPORT_TABLE_DATA_BY_DATE
     * @desc Wrapper procedure for DBMS_CLOUD.EXPORT_DATA.
     *       Exports data into PARQUET files on OCI infrastructure.
     *       Each YEAR_MONTH pair goes to separate file (implicit partitioning).
     *       Allows specifying custom column list or uses T.* if pColumnList is NULL.
     *       Validates that all columns in pColumnList exist in the target table.
     *       Automatically adds 'T.' prefix to column names in pColumnList.
     *       Supports parallel partition processing via pParallelDegree parameter (default 1, range 1-16).
     *       pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
     * @example
     * begin
     *   DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
     *       pSchemaName     => 'CT_MRDS',
     *       pTableName      => 'MY_TABLE',
     *       pKeyColumnName  => 'A_ETL_LOAD_SET_KEY_FK',
     *       pBucketArea     => 'DATA',
     *       pFolderName     => 'parquet_exports',
     *       pColumnList     => 'COLUMN1, COLUMN2, COLUMN3',  -- Optional
     *       pMinDate        => DATE '2024-01-01',
     *       pMaxDate        => SYSDATE,
     *       pParallelDegree => 8                             -- Optional, default 1, range 1-16
     *   );
     * end;
     **/
    PROCEDURE EXPORT_TABLE_DATA_BY_DATE (
        pSchemaName        IN VARCHAR2,
        pTableName         IN VARCHAR2,
        pKeyColumnName     IN VARCHAR2,
        pBucketArea        IN VARCHAR2,
        pFolderName        IN VARCHAR2,
        pColumnList        IN VARCHAR2 default NULL,
        pMinDate           IN DATE     default DATE '1900-01-01',
        pMaxDate           IN DATE     default SYSDATE,
        pParallelDegree    IN NUMBER   default 1,
        pTemplateTableName IN VARCHAR2 default NULL,
        pCredentialName    IN VARCHAR2 default ENV_MANAGER.gvCredentialName
    );

    /**
     * @name EXPORT_TABLE_DATA_TO_CSV_BY_DATE
     * @desc Exports data to separate CSV files partitioned by year and month.
     *       Creates one CSV file for each year/month combination found in the data.
     *       Uses the same date filtering mechanism with CT_ODS.A_LOAD_HISTORY as EXPORT_TABLE_DATA_BY_DATE,
     *       but exports to CSV format instead of Parquet.
     *       Supports parallel partition processing via pParallelDegree parameter (1-16).
     *       File naming pattern: {pFileName}_YYYYMM.csv or {TABLENAME}_YYYYMM.csv (if pFileName is NULL)
     *       When pRegisterExport=TRUE, successfully exported files are registered in:
     *       - CT_MRDS.A_SOURCE_FILE_RECEIVED (tracks file location, size, checksum, and metadata)
     *       pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
     * @param pProcessName - Process name stored in PROCESS_NAME column (default 'DATA_EXPORTER')
     * @example
     * begin
     *   -- With custom filename
     *   DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
     *       pSchemaName     => 'CT_MRDS',
     *       pTableName      => 'MY_TABLE',
     *       pKeyColumnName  => 'A_ETL_LOAD_SET_KEY_FK',
     *       pBucketArea     => 'DATA',
     *       pFolderName     => 'exports',
     *       pFileName       => 'my_export.csv',
     *       pMinDate        => DATE '2024-01-01',
     *       pMaxDate        => SYSDATE,
     *       pParallelDegree => 8,    -- Optional, default 1, range 1-16
     *       pRegisterExport => TRUE  -- Optional, default FALSE, registers to A_SOURCE_FILE_RECEIVED
     *   );
     *
     *   -- With auto-generated filename (based on table name only)
     *   DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
     *       pSchemaName     => 'OU_TOP',
     *       pTableName      => 'AGGREGATED_ALLOTMENT',
     *       pKeyColumnName  => 'A_ETL_LOAD_SET_KEY_FK',
     *       pBucketArea     => 'ARCHIVE',
     *       pFolderName     => 'exports',
     *       pMinDate        => DATE '2025-09-01',
     *       pMaxDate        => DATE '2025-09-17',
     *       pRegisterExport => TRUE  -- Registers each export to A_SOURCE_FILE_RECEIVED table
     *   );
     *   -- This will create files like: AGGREGATED_ALLOTMENT_202509.csv, etc.
     * end;
     **/
    PROCEDURE EXPORT_TABLE_DATA_TO_CSV_BY_DATE (
        pSchemaName        IN VARCHAR2,
        pTableName         IN VARCHAR2,
        pKeyColumnName     IN VARCHAR2,
        pBucketArea        IN VARCHAR2,
        pFolderName        IN VARCHAR2,
        pFileName          IN VARCHAR2 DEFAULT NULL,
        pColumnList        IN VARCHAR2 default NULL,
        pMinDate           IN DATE     default DATE '1900-01-01',
        pMaxDate           IN DATE     default SYSDATE,
        pParallelDegree    IN NUMBER   default 1,
        pTemplateTableName IN VARCHAR2 default NULL,
        pMaxFileSize       IN NUMBER   default 104857600,
        pRegisterExport    IN BOOLEAN  default FALSE,
        pProcessName       IN VARCHAR2 default 'DATA_EXPORTER',
        pCredentialName    IN VARCHAR2 default ENV_MANAGER.gvCredentialName
    );

    ---------------------------------------------------------------------------------------------------------------------------
    -- VERSION MANAGEMENT FUNCTIONS
    ---------------------------------------------------------------------------------------------------------------------------

    /**
     * Returns the current package version number
     * return: Version string in format X.Y.Z (e.g., '2.1.0')
     **/
    FUNCTION GET_VERSION RETURN VARCHAR2;

    /**
     * Returns comprehensive build information including version, date, and author
     * return: Formatted string with complete build details
     **/
    FUNCTION GET_BUILD_INFO RETURN VARCHAR2;

    /**
     * Returns the version history with recent changes
     * return: Multi-line string with version history
     **/
    FUNCTION GET_VERSION_HISTORY RETURN VARCHAR2;

END;
/
|
||||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,625 @@
|
|||||||
|
create or replace PACKAGE CT_MRDS.ENV_MANAGER
|
||||||
|
AUTHID CURRENT_USER
|
||||||
|
AS
|
||||||
|
/**
|
||||||
|
* General comment for package: Please put comments for functions and procedures as shown in below example.
|
||||||
|
* It is a standard.
|
||||||
|
* The structure of comment is used by GET_PACKAGE_DOCUMENTATION function
|
||||||
|
* which returns documentation text for confluence page (to Copy-Paste it).
|
||||||
|
**/
|
||||||
|
|
||||||
|
-- Example comment:
|
||||||
|
/**
|
||||||
|
* @name EX_PROCEDURE_NAME
|
||||||
|
* @desc Procedure description
|
||||||
|
* @example select ENV_MANAGER.EX_PROCEDURE_NAME(pParameter => 129) from dual;
|
||||||
|
* @ex_rslt Example Result
|
||||||
|
**/
|
||||||
|
|
||||||
|
-- Package Version Information (Semantic Versioning: MAJOR.MINOR.PATCH)
|
||||||
|
PACKAGE_VERSION CONSTANT VARCHAR2(10) := '3.2.0';
|
||||||
|
PACKAGE_BUILD_DATE CONSTANT VARCHAR2(20) := '2025-12-20 10:00:00';
|
||||||
|
PACKAGE_AUTHOR CONSTANT VARCHAR2(100) := 'Grzegorz Michalski';
|
||||||
|
|
||||||
|
-- Version History (Latest changes first)
|
||||||
|
VERSION_HISTORY CONSTANT VARCHAR2(4000) :=
|
||||||
|
'3.2.0 (2025-12-20): Added error codes for parallel execution support (CODE_INVALID_PARALLEL_DEGREE -20110, CODE_PARALLEL_EXECUTION_FAILED -20111)' || CHR(13)||CHR(10) ||
|
||||||
|
'3.1.0 (2025-10-22): Added package hash tracking and automatic change detection system (SHA256 hashing)' || CHR(13)||CHR(10) ||
|
||||||
|
'3.0.0 (2025-10-22): Added package versioning system with centralized version management functions' || CHR(13)||CHR(10) ||
|
||||||
|
'2.1.0 (2025-10-15): Added ANALYZE_VALIDATION_ERRORS function for comprehensive CSV validation analysis' || CHR(13)||CHR(10) ||
|
||||||
|
'2.0.0 (2025-10-01): Added LOG_PROCESS_ERROR procedure with enhanced error diagnostics and stack traces' || CHR(13)||CHR(10) ||
|
||||||
|
'1.5.0 (2025-09-20): Added console logging support with gvConsoleLoggingEnabled configuration' || CHR(13)||CHR(10) ||
|
||||||
|
'1.0.0 (2025-09-01): Initial release with error management and configuration system';
|
||||||
|
|
||||||
|
TYPE Error_Record IS RECORD (
|
||||||
|
code PLS_INTEGER,
|
||||||
|
message VARCHAR2(4000)
|
||||||
|
);
|
||||||
|
|
||||||
|
TYPE tErrorList IS TABLE OF Error_Record INDEX BY PLS_INTEGER;
|
||||||
|
|
||||||
|
Errors tErrorList;
|
||||||
|
|
||||||
|
|
||||||
|
guid VARCHAR2(32);
|
||||||
|
gvEnv VARCHAR2(200);
|
||||||
|
gvUsername VARCHAR2(128);
|
||||||
|
gvOsuser VARCHAR2(128);
|
||||||
|
gvMachine VARCHAR2(64);
|
||||||
|
gvModule VARCHAR2(64);
|
||||||
|
|
||||||
|
gvNameSpace VARCHAR2(200);
|
||||||
|
gvRegion VARCHAR2(200);
|
||||||
|
gvDataBucketName VARCHAR2(200);
|
||||||
|
gvInboxBucketName VARCHAR2(200);
|
||||||
|
gvArchiveBucketName VARCHAR2(200);
|
||||||
|
gvDataBucketUri VARCHAR2(200);
|
||||||
|
gvInboxBucketUri VARCHAR2(200);
|
||||||
|
gvArchiveBucketUri VARCHAR2(200);
|
||||||
|
gvCredentialName VARCHAR2(200);
|
||||||
|
|
||||||
|
-- Overwritten by variable "LoggingEnabled" in A_FILE_MANAGER_CONFIG.CONFIG_VARIABLE table
|
||||||
|
gvLoggingEnabled VARCHAR2(3) := 'ON'; -- 'ON' or 'OFF'
|
||||||
|
|
||||||
|
-- Overwritten by variable "MinLogLevel" in A_FILE_MANAGER_CONFIG.CONFIG_VARIABLE table
|
||||||
|
-- Possible values: DEBUG ,INFO ,WARNING ,ERROR
|
||||||
|
gvMinLogLevel VARCHAR2(10) := 'DEBUG';
|
||||||
|
|
||||||
|
-- Overwritten by variable "DefaultDateFormat" in A_FILE_MANAGER_CONFIG.CONFIG_VARIABLE table
|
||||||
|
gvDefaultDateFormat VARCHAR2(200) := 'DD/MM/YYYY HH24:MI:SS';
|
||||||
|
|
||||||
|
-- Overwritten by variable "ConsoleLoggingEnabled" in A_FILE_MANAGER_CONFIG.CONFIG_VARIABLE table
|
||||||
|
gvConsoleLoggingEnabled VARCHAR2(3) := 'ON'; -- 'ON' or 'OFF'
|
||||||
|
|
||||||
|
cgBL CONSTANT VARCHAR2(2) := CHR(13)||CHR(10);
|
||||||
|
|
||||||
|
vgSourceFileConfigKey PLS_INTEGER;
|
||||||
|
|
||||||
|
vgMsgTmp VARCHAR2(32000);
|
||||||
|
--Exceptions
|
||||||
|
ERR_EMPTY_FILEURI_AND_RECKEY EXCEPTION;
|
||||||
|
CODE_EMPTY_FILEURI_AND_RECKEY CONSTANT PLS_INTEGER := -20001;
|
||||||
|
MSG_EMPTY_FILEURI_AND_RECKEY VARCHAR2(4000) := 'Either pFileUri or pSourceFileReceivedKey must be not null';
|
||||||
|
PRAGMA EXCEPTION_INIT( ERR_EMPTY_FILEURI_AND_RECKEY
|
||||||
|
,CODE_EMPTY_FILEURI_AND_RECKEY);
|
||||||
|
|
||||||
|
|
||||||
|
ERR_NO_CONFIG_MATCH_FOR_FILEURI EXCEPTION;
|
||||||
|
CODE_NO_CONFIG_MATCH_FOR_FILEURI CONSTANT PLS_INTEGER := -20002;
|
||||||
|
MSG_NO_CONFIG_MATCH_FOR_FILEURI VARCHAR2(4000) := 'No match for source file in A_SOURCE_FILE_CONFIG table'
|
||||||
|
||cgBL||' The file provided in parameter: pFileUri does not have '
|
||||||
|
||cgBL||' coresponding configuration in A_SOURCE_FILE_CONFIG table';
|
||||||
|
PRAGMA EXCEPTION_INIT( ERR_NO_CONFIG_MATCH_FOR_FILEURI
|
||||||
|
,CODE_NO_CONFIG_MATCH_FOR_FILEURI);
|
||||||
|
|
||||||
|
ERR_MULTIPLE_MATCH_FOR_SRCFILE EXCEPTION;
|
||||||
|
CODE_MULTIPLE_MATCH_FOR_SRCFILE CONSTANT PLS_INTEGER := -20003;
|
||||||
|
MSG_MULTIPLE_MATCH_FOR_SRCFILE VARCHAR2(4000) := 'Multiple match for source file in A_SOURCE_FILE_CONFIG table';
|
||||||
|
PRAGMA EXCEPTION_INIT( ERR_MULTIPLE_MATCH_FOR_SRCFILE
|
||||||
|
,CODE_MULTIPLE_MATCH_FOR_SRCFILE);
|
||||||
|
|
||||||
|
ERR_MISSING_COLUMN_DATE_FORMAT EXCEPTION;
|
||||||
|
CODE_MISSING_COLUMN_DATE_FORMAT CONSTANT PLS_INTEGER := -20004;
|
||||||
|
MSG_MISSING_COLUMN_DATE_FORMAT VARCHAR2(4000) := 'Missing entry in config table: A_COLUMN_DATE_FORMAT primary key(TEMPLATE_TABLE_NAME, COLUMN_NAME)'
|
||||||
|
||cgBL||' Remember: each column which data_type IN (''DATE'', ''TIMESTAMP'')'
|
||||||
|
||cgBL||' should have DateFormat specified in A_COLUMN_DATE_FORMAT table '
|
||||||
|
||cgBL||' for example: ''YYYY-MM-DD''';
|
||||||
|
PRAGMA EXCEPTION_INIT( ERR_MISSING_COLUMN_DATE_FORMAT
|
||||||
|
,CODE_MISSING_COLUMN_DATE_FORMAT);
|
||||||
|
|
||||||
|
ERR_MULTIPLE_COLUMN_DATE_FORMAT EXCEPTION;
|
||||||
|
CODE_MULTIPLE_COLUMN_DATE_FORMAT CONSTANT PLS_INTEGER := -20005;
|
||||||
|
MSG_MULTIPLE_COLUMN_DATE_FORMAT VARCHAR2(4000) := 'Multiple records for date format in A_COLUMN_DATE_FORMAT table'
|
||||||
|
||cgBL||' There should be only one format specified for each DAT/TIMESTAMP column';
|
||||||
|
PRAGMA EXCEPTION_INIT( ERR_MULTIPLE_COLUMN_DATE_FORMAT
|
||||||
|
,CODE_MULTIPLE_COLUMN_DATE_FORMAT);
|
||||||
|
|
||||||
|
|
||||||
|
ERR_DIDNT_GET_LOAD_OPERATION_ID EXCEPTION;
|
||||||
|
CODE_DIDNT_GET_LOAD_OPERATION_ID CONSTANT PLS_INTEGER := -20006;
|
||||||
|
MSG_DIDNT_GET_LOAD_OPERATION_ID VARCHAR2(4000) := 'Didnt get load operation id from external table validation';
|
||||||
|
PRAGMA EXCEPTION_INIT( ERR_DIDNT_GET_LOAD_OPERATION_ID
|
||||||
|
,CODE_DIDNT_GET_LOAD_OPERATION_ID);
|
||||||
|
|
||||||
|
ERR_NO_CONFIG_FOR_RECEIVED_FILE EXCEPTION;
|
||||||
|
CODE_NO_CONFIG_FOR_RECEIVED_FILE CONSTANT PLS_INTEGER := -20007;
|
||||||
|
MSG_NO_CONFIG_FOR_RECEIVED_FILE VARCHAR2(4000) := 'No match for received source file in A_SOURCE_FILE_CONFIG '
|
||||||
|
||cgBL||' or missing data in A_SOURCE_FILE_RECEIVED table for provided pSourceFileReceivedKey parameter';
|
||||||
|
PRAGMA EXCEPTION_INIT( ERR_NO_CONFIG_FOR_RECEIVED_FILE
|
||||||
|
,CODE_NO_CONFIG_FOR_RECEIVED_FILE);
|
||||||
|
|
||||||
|
ERR_MULTI_CONFIG_FOR_RECEIVED_FILE EXCEPTION;
|
||||||
|
CODE_MULTI_CONFIG_FOR_RECEIVED_FILE CONSTANT PLS_INTEGER := -20008;
|
||||||
|
MSG_MULTI_CONFIG_FOR_RECEIVED_FILE VARCHAR2(4000) := 'Multiple matchs for received source file in A_SOURCE_FILE_CONFIG';
|
||||||
|
PRAGMA EXCEPTION_INIT( ERR_MULTI_CONFIG_FOR_RECEIVED_FILE
|
||||||
|
,CODE_MULTI_CONFIG_FOR_RECEIVED_FILE);
|
||||||
|
|
||||||
|
ERR_FILE_NOT_FOUND_ON_CLOUD EXCEPTION;
|
||||||
|
CODE_FILE_NOT_FOUND_ON_CLOUD CONSTANT PLS_INTEGER := -20009;
|
||||||
|
MSG_FILE_NOT_FOUND_ON_CLOUD VARCHAR2(4000) := 'File not found on the cloud';
|
||||||
|
PRAGMA EXCEPTION_INIT( ERR_FILE_NOT_FOUND_ON_CLOUD
|
||||||
|
,CODE_FILE_NOT_FOUND_ON_CLOUD);
|
||||||
|
|
||||||
|
ERR_FILE_VALIDATION_FAILED EXCEPTION;
|
||||||
|
CODE_FILE_VALIDATION_FAILED CONSTANT PLS_INTEGER := -20010;
|
||||||
|
MSG_FILE_VALIDATION_FAILED VARCHAR2(4000) := 'File validation failed';
|
||||||
|
PRAGMA EXCEPTION_INIT( ERR_FILE_VALIDATION_FAILED
|
||||||
|
,CODE_FILE_VALIDATION_FAILED);
|
||||||
|
|
||||||
|
ERR_EXCESS_COLUMNS_DETECTED EXCEPTION;
|
||||||
|
CODE_EXCESS_COLUMNS_DETECTED CONSTANT PLS_INTEGER := -20011;
|
||||||
|
MSG_EXCESS_COLUMNS_DETECTED VARCHAR2(4000) := 'CSV file contains more columns than template allows';
|
||||||
|
PRAGMA EXCEPTION_INIT( ERR_EXCESS_COLUMNS_DETECTED
|
||||||
|
,CODE_EXCESS_COLUMNS_DETECTED);
|
||||||
|
|
||||||
|
ERR_NO_CONFIG_MATCH EXCEPTION;
|
||||||
|
CODE_NO_CONFIG_MATCH CONSTANT PLS_INTEGER := -20012;
|
||||||
|
MSG_NO_CONFIG_MATCH VARCHAR2(4000) := 'No match for specified parameters in A_SOURCE_FILE_CONFIG table';
|
||||||
|
PRAGMA EXCEPTION_INIT( ERR_NO_CONFIG_MATCH
|
||||||
|
,CODE_NO_CONFIG_MATCH);
|
||||||
|
|
||||||
|
ERR_UNKNOWN_PREFIX EXCEPTION;
|
||||||
|
CODE_UNKNOWN_PREFIX CONSTANT PLS_INTEGER := -20013;
|
||||||
|
MSG_UNKNOWN_PREFIX VARCHAR2(4000) := 'Unknown prefix';
|
||||||
|
PRAGMA EXCEPTION_INIT( ERR_UNKNOWN_PREFIX
|
||||||
|
,CODE_UNKNOWN_PREFIX);
|
||||||
|
|
||||||
|
ERR_TABLE_NOT_EXISTS EXCEPTION;
|
||||||
|
CODE_TABLE_NOT_EXISTS CONSTANT PLS_INTEGER := -20014;
|
||||||
|
MSG_TABLE_NOT_EXISTS VARCHAR2(4000) := 'Table does not exist';
|
||||||
|
PRAGMA EXCEPTION_INIT( ERR_TABLE_NOT_EXISTS
|
||||||
|
,CODE_TABLE_NOT_EXISTS);
|
||||||
|
|
||||||
|
ERR_COLUMN_NOT_EXISTS EXCEPTION;
|
||||||
|
CODE_COLUMN_NOT_EXISTS CONSTANT PLS_INTEGER := -20015;
|
||||||
|
MSG_COLUMN_NOT_EXISTS VARCHAR2(4000) := 'Column does not exist in table';
|
||||||
|
PRAGMA EXCEPTION_INIT( ERR_COLUMN_NOT_EXISTS
|
||||||
|
,CODE_COLUMN_NOT_EXISTS);
|
||||||
|
|
||||||
|
ERR_UNSUPPORTED_DATA_TYPE EXCEPTION;
|
||||||
|
CODE_UNSUPPORTED_DATA_TYPE CONSTANT PLS_INTEGER := -20016;
|
||||||
|
MSG_UNSUPPORTED_DATA_TYPE VARCHAR2(4000) := 'Unsupported data type';
|
||||||
|
PRAGMA EXCEPTION_INIT( ERR_UNSUPPORTED_DATA_TYPE
|
||||||
|
,CODE_UNSUPPORTED_DATA_TYPE);
|
||||||
|
|
||||||
|
ERR_MISSING_SOURCE_KEY EXCEPTION;
|
||||||
|
CODE_MISSING_SOURCE_KEY CONSTANT PLS_INTEGER := -20017;
|
||||||
|
MSG_MISSING_SOURCE_KEY VARCHAR2(4000) := 'The Source was not found in parent table A_SOURCE';
|
||||||
|
PRAGMA EXCEPTION_INIT( ERR_MISSING_SOURCE_KEY
|
||||||
|
,CODE_MISSING_SOURCE_KEY);
|
||||||
|
|
||||||
|
ERR_NULL_SOURCE_FILE_CONFIG_KEY EXCEPTION;
|
||||||
|
CODE_NULL_SOURCE_FILE_CONFIG_KEY CONSTANT PLS_INTEGER := -20018;
|
||||||
|
MSG_NULL_SOURCE_FILE_CONFIG_KEY VARCHAR2(4000) := 'No entry in A_SOURCE_FILE_CONFIG table for specified A_SOURCE_FILE_CONFIG_KEY';
|
||||||
|
PRAGMA EXCEPTION_INIT( ERR_NULL_SOURCE_FILE_CONFIG_KEY
|
||||||
|
,CODE_NULL_SOURCE_FILE_CONFIG_KEY);
|
||||||
|
|
||||||
|
ERR_DUPLICATED_SOURCE_KEY EXCEPTION;
|
||||||
|
CODE_DUPLICATED_SOURCE_KEY CONSTANT PLS_INTEGER := -20019;
|
||||||
|
MSG_DUPLICATED_SOURCE_KEY VARCHAR2(4000) := 'The Source already exists in the A_SOURCE table';
|
||||||
|
PRAGMA EXCEPTION_INIT( ERR_DUPLICATED_SOURCE_KEY
|
||||||
|
,CODE_DUPLICATED_SOURCE_KEY);
|
||||||
|
|
||||||
|
ERR_MISSING_CONTAINER_CONFIG EXCEPTION;
|
||||||
|
CODE_MISSING_CONTAINER_CONFIG CONSTANT PLS_INTEGER := -20020;
|
||||||
|
MSG_MISSING_CONTAINER_CONFIG VARCHAR2(4000) := 'No match in A_SOURCE_FILE_CONFIG table where SOURCE_FILE_TYPE=''CONTAINER'' and specified SOURCE_FILE_ID';
|
||||||
|
PRAGMA EXCEPTION_INIT( ERR_MISSING_CONTAINER_CONFIG
|
||||||
|
,CODE_MISSING_CONTAINER_CONFIG);
|
||||||
|
|
||||||
|
ERR_MULTIPLE_CONTAINER_ENTRIES EXCEPTION;
|
||||||
|
CODE_MULTIPLE_CONTAINER_ENTRIES CONSTANT PLS_INTEGER := -20021;
|
||||||
|
MSG_MULTIPLE_CONTAINER_ENTRIES VARCHAR2(4000) := 'Multiple matches in A_SOURCE_FILE_CONFIG table where SOURCE_FILE_TYPE=''CONTAINER'' and specified SOURCE_FILE_ID';
|
||||||
|
PRAGMA EXCEPTION_INIT( ERR_MULTIPLE_CONTAINER_ENTRIES
|
||||||
|
,CODE_MULTIPLE_CONTAINER_ENTRIES);
|
||||||
|
|
||||||
|
ERR_WRONG_DESTINATION_PARAM EXCEPTION;
|
||||||
|
CODE_WRONG_DESTINATION_PARAM CONSTANT PLS_INTEGER := -20022;
|
||||||
|
MSG_WRONG_DESTINATION_PARAM VARCHAR2(4000) := 'Wrong destination parameter provided.';
|
||||||
|
PRAGMA EXCEPTION_INIT( ERR_WRONG_DESTINATION_PARAM
|
||||||
|
,CODE_WRONG_DESTINATION_PARAM);
|
||||||
|
|
||||||
|
ERR_FILE_NOT_EXISTS_ON_CLOUD EXCEPTION;
|
||||||
|
CODE_FILE_NOT_EXISTS_ON_CLOUD CONSTANT PLS_INTEGER := -20023;
|
||||||
|
MSG_FILE_NOT_EXISTS_ON_CLOUD VARCHAR2(4000) := 'File not exists on cloud.';
|
||||||
|
PRAGMA EXCEPTION_INIT( ERR_FILE_NOT_EXISTS_ON_CLOUD
|
||||||
|
,CODE_FILE_NOT_EXISTS_ON_CLOUD);
|
||||||
|
|
||||||
|
ERR_FILE_ALREADY_REGISTERED EXCEPTION;
|
||||||
|
CODE_FILE_ALREADY_REGISTERED CONSTANT PLS_INTEGER := -20024;
|
||||||
|
MSG_FILE_ALREADY_REGISTERED VARCHAR2(4000) := 'File already registered in A_SOURCE_FILE_RECEIVED table.';
|
||||||
|
PRAGMA EXCEPTION_INIT( ERR_FILE_ALREADY_REGISTERED
|
||||||
|
,CODE_FILE_ALREADY_REGISTERED);
|
||||||
|
|
||||||
|
ERR_WRONG_DATE_TIMESTAMP_FORMAT EXCEPTION;
|
||||||
|
CODE_WRONG_DATE_TIMESTAMP_FORMAT CONSTANT PLS_INTEGER := -20025;
|
||||||
|
MSG_WRONG_DATE_TIMESTAMP_FORMAT VARCHAR2(4000) := 'Provided DATE or TIMESTAMP format has errors (possible duplicated codes, ex: ''DD'').';
|
||||||
|
PRAGMA EXCEPTION_INIT( ERR_WRONG_DATE_TIMESTAMP_FORMAT
|
||||||
|
,CODE_WRONG_DATE_TIMESTAMP_FORMAT);
|
||||||
|
|
||||||
|
ERR_ENVIRONMENT_NOT_SET EXCEPTION;
|
||||||
|
CODE_ENVIRONMENT_NOT_SET CONSTANT PLS_INTEGER := -20026;
|
||||||
|
MSG_ENVIRONMENT_NOT_SET VARCHAR2(4000) := 'EnvironmentID not set'
|
||||||
|
||cgBL||' Information about environment is needed to get proper configuration values.'
|
||||||
|
||cgBL||' It can be set up in two different ways:'
|
||||||
|
||cgBL||' 1. Set it on session level: execute DBMS_SESSION.SET_IDENTIFIER (client_id => ''dev'')'
|
||||||
|
||cgBL||' 2. Set it on configuration level: Insert into CT_MRDS.A_FILE_MANAGER_CONFIG (ENVIRONMENT_ID,CONFIG_VARIABLE,CONFIG_VARIABLE_VALUE) values (''default'',''environment_id'',''dev'')'
|
||||||
|
||cgBL||' Session level setup (1.) takes precedence over configuration level one (2.)'
|
||||||
|
;
|
||||||
|
PRAGMA EXCEPTION_INIT( ERR_ENVIRONMENT_NOT_SET
|
||||||
|
,CODE_ENVIRONMENT_NOT_SET);
|
||||||
|
|
||||||
|
|
||||||
|
ERR_CONFIG_VARIABLE_NOT_SET EXCEPTION;
|
||||||
|
CODE_CONFIG_VARIABLE_NOT_SET CONSTANT PLS_INTEGER := -20027;
|
||||||
|
MSG_CONFIG_VARIABLE_NOT_SET VARCHAR2(4000) := 'Missing configuration value in A_FILE_MANAGER_CONFIG';
|
||||||
|
PRAGMA EXCEPTION_INIT( ERR_CONFIG_VARIABLE_NOT_SET
|
||||||
|
,CODE_CONFIG_VARIABLE_NOT_SET);
|
||||||
|
|
||||||
|
ERR_NOT_INPUT_SOURCE_FILE_TYPE EXCEPTION;
|
||||||
|
CODE_NOT_INPUT_SOURCE_FILE_TYPE CONSTANT PLS_INTEGER := -20028;
|
||||||
|
MSG_NOT_INPUT_SOURCE_FILE_TYPE VARCHAR2(4000) := 'Archival can be executed only for A_SOURCE_FILE_CONFIG_KEY where SOURCE_FILE_TYPE=''INPUT''';
|
||||||
|
PRAGMA EXCEPTION_INIT( ERR_NOT_INPUT_SOURCE_FILE_TYPE
|
||||||
|
,CODE_NOT_INPUT_SOURCE_FILE_TYPE);
|
||||||
|
|
||||||
|
ERR_EXP_DATA_FOR_ARCH_FAILED EXCEPTION;
|
||||||
|
CODE_EXP_DATA_FOR_ARCH_FAILED CONSTANT PLS_INTEGER := -20029;
|
||||||
|
MSG_EXP_DATA_FOR_ARCH_FAILED VARCHAR2(4000) := 'Export data for archival failed.';
|
||||||
|
PRAGMA EXCEPTION_INIT( ERR_EXP_DATA_FOR_ARCH_FAILED
|
||||||
|
,CODE_EXP_DATA_FOR_ARCH_FAILED);
|
||||||
|
|
||||||
|
ERR_RESTORE_FILE_FROM_TRASH EXCEPTION;
|
||||||
|
CODE_RESTORE_FILE_FROM_TRASH CONSTANT PLS_INTEGER := -20030;
|
||||||
|
MSG_RESTORE_FILE_FROM_TRASH VARCHAR2(4000) := 'Unexpected issues occured while archival process. Restoration of exported files failed.';
|
||||||
|
PRAGMA EXCEPTION_INIT( ERR_RESTORE_FILE_FROM_TRASH
|
||||||
|
,CODE_RESTORE_FILE_FROM_TRASH);
|
||||||
|
|
||||||
|
ERR_CHANGE_STAT_TO_ARCHIVED_FAILED EXCEPTION;
|
||||||
|
CODE_CHANGE_STAT_TO_ARCHIVED_FAILED CONSTANT PLS_INTEGER := -20031;
|
||||||
|
MSG_CHANGE_STAT_TO_ARCHIVED_FAILED VARCHAR2(4000) := 'Failed to change file status to: ARCHIVED in A_SOURCE_FILE_RECEIVED table.';
|
||||||
|
PRAGMA EXCEPTION_INIT( ERR_CHANGE_STAT_TO_ARCHIVED_FAILED
|
||||||
|
,CODE_CHANGE_STAT_TO_ARCHIVED_FAILED);
|
||||||
|
|
||||||
|
ERR_MOVE_FILE_TO_TRASH_FAILED EXCEPTION;
|
||||||
|
CODE_MOVE_FILE_TO_TRASH_FAILED CONSTANT PLS_INTEGER := -20032;
|
||||||
|
MSG_MOVE_FILE_TO_TRASH_FAILED VARCHAR2(4000) := 'FAILED to move file to TRASH before DROPPING it.';
|
||||||
|
PRAGMA EXCEPTION_INIT( ERR_MOVE_FILE_TO_TRASH_FAILED
|
||||||
|
,CODE_MOVE_FILE_TO_TRASH_FAILED);
|
||||||
|
|
||||||
|
ERR_DROP_EXPORTED_FILES_FAILED EXCEPTION;
|
||||||
|
CODE_DROP_EXPORTED_FILES_FAILED CONSTANT PLS_INTEGER := -20033;
|
||||||
|
MSG_DROP_EXPORTED_FILES_FAILED VARCHAR2(4000) := 'FAILED to move file to TRASH before DROPPING it.';
|
||||||
|
PRAGMA EXCEPTION_INIT( ERR_DROP_EXPORTED_FILES_FAILED
|
||||||
|
,CODE_DROP_EXPORTED_FILES_FAILED);
|
||||||
|
|
||||||
|
ERR_INVALID_BUCKET_AREA EXCEPTION;
|
||||||
|
CODE_INVALID_BUCKET_AREA CONSTANT PLS_INTEGER := -20034;
|
||||||
|
MSG_INVALID_BUCKET_AREA VARCHAR2(4000) := 'Invalid bucket area specified. Valid values: INBOX, ODS, DATA, ARCHIVE';
|
||||||
|
PRAGMA EXCEPTION_INIT( ERR_INVALID_BUCKET_AREA
|
||||||
|
,CODE_INVALID_BUCKET_AREA);
|
||||||
|
|
||||||
|
ERR_INVALID_PARALLEL_DEGREE EXCEPTION;
|
||||||
|
CODE_INVALID_PARALLEL_DEGREE CONSTANT PLS_INTEGER := -20110;
|
||||||
|
MSG_INVALID_PARALLEL_DEGREE VARCHAR2(4000) := 'Invalid parallel degree parameter. Must be between 1 and 16';
|
||||||
|
PRAGMA EXCEPTION_INIT( ERR_INVALID_PARALLEL_DEGREE
|
||||||
|
,CODE_INVALID_PARALLEL_DEGREE);
|
||||||
|
|
||||||
|
ERR_PARALLEL_EXECUTION_FAILED EXCEPTION;
|
||||||
|
CODE_PARALLEL_EXECUTION_FAILED CONSTANT PLS_INTEGER := -20111;
|
||||||
|
MSG_PARALLEL_EXECUTION_FAILED VARCHAR2(4000) := 'Parallel execution failed';
|
||||||
|
PRAGMA EXCEPTION_INIT( ERR_PARALLEL_EXECUTION_FAILED
|
||||||
|
,CODE_PARALLEL_EXECUTION_FAILED);
|
||||||
|
|
||||||
|
ERR_UNKNOWN EXCEPTION;
|
||||||
|
CODE_UNKNOWN CONSTANT PLS_INTEGER := -20999;
|
||||||
|
MSG_UNKNOWN VARCHAR2(4000) := 'Unknown Error Occured';
|
||||||
|
PRAGMA EXCEPTION_INIT( ERR_UNKNOWN
|
||||||
|
,CODE_UNKNOWN);
|
||||||
|
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name LOG_PROCESS_EVENT
|
||||||
|
* @desc Insert a new log record into A_PROCESS_LOG table.
|
||||||
|
* Also outputs to console if gvConsoleLoggingEnabled = 'ON'.
|
||||||
|
* Respects logging level configuration (gvMinLogLevel).
|
||||||
|
* @example ENV_MANAGER.LOG_PROCESS_EVENT('Process completed successfully', 'INFO', 'pParam1=value1');
|
||||||
|
* @ex_rslt Record inserted into A_PROCESS_LOG table and optionally displayed in console output
|
||||||
|
**/
|
||||||
|
PROCEDURE LOG_PROCESS_EVENT (
|
||||||
|
pLogMessage VARCHAR2
|
||||||
|
,pLogLevel VARCHAR2 DEFAULT 'ERROR'
|
||||||
|
,pParameters VARCHAR2 DEFAULT NULL
|
||||||
|
,pProcessName VARCHAR2 DEFAULT 'FILE_MANAGER'
|
||||||
|
);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name LOG_PROCESS_ERROR
|
||||||
|
* @desc Insert a detailed error record into A_PROCESS_LOG table with full stack trace, backtrace, and call stack.
|
||||||
|
* This procedure captures comprehensive error information for debugging purposes while
|
||||||
|
* allowing clean user-facing error messages to be raised separately.
|
||||||
|
* @param pLogMessage - Base error message description
|
||||||
|
* @param pParameters - Procedure parameters for context
|
||||||
|
* @param pProcessName - Name of the calling process/package
|
||||||
|
* @ex_rslt Record inserted into A_PROCESS_LOG table with complete error stack information
|
||||||
|
*/
|
||||||
|
PROCEDURE LOG_PROCESS_ERROR (
|
||||||
|
pLogMessage VARCHAR2
|
||||||
|
,pParameters VARCHAR2 DEFAULT NULL
|
||||||
|
,pProcessName VARCHAR2 DEFAULT 'FILE_MANAGER'
|
||||||
|
);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name INIT_ERRORS
|
||||||
|
* @desc Loads data into Errors array.
|
||||||
|
* Errors array is a list of Record(Error_Code, Error_Message) index by Error_Code.
|
||||||
|
* Called automatically during package initialization.
|
||||||
|
* @example Called automatically when package is first referenced
|
||||||
|
* @ex_rslt Errors array populated with all error codes and messages
|
||||||
|
**/
|
||||||
|
PROCEDURE INIT_ERRORS;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_DEFAULT_ENV
|
||||||
|
* @desc It returns string with name of default environment.
|
||||||
|
* Return string is A_FILE_MANAGER_CONFIG.ENVIRONMENT_ID value.
|
||||||
|
* @example select ENV_MANAGER.GET_DEFAULT_ENV() from dual;
|
||||||
|
* @ex_rslt dev
|
||||||
|
**/
|
||||||
|
FUNCTION GET_DEFAULT_ENV
|
||||||
|
RETURN VARCHAR2;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name INIT_VARIABLES
|
||||||
|
* @desc For specified pEnv parameter (A_FILE_MANAGER_CONFIG.ENVIRONMENT_ID)
|
||||||
|
* Assign values to following global package variables:
|
||||||
|
* - gvNameSpace
|
||||||
|
* - gvRegion
|
||||||
|
* - gvCredentialName
|
||||||
|
* - gvInboxBucketName
|
||||||
|
* - gvDataBucketName
|
||||||
|
* - gvArchiveBucketName
|
||||||
|
* - gvInboxBucketUri
|
||||||
|
* - gvDataBucketUri
|
||||||
|
* - gvArchiveBucketUri
|
||||||
|
* - gvLoggingEnabled
|
||||||
|
* - gvMinLogLevel
|
||||||
|
* - gvDefaultDateFormat
|
||||||
|
* - gvConsoleLoggingEnabled
|
||||||
|
**/
|
||||||
|
PROCEDURE INIT_VARIABLES(
|
||||||
|
pEnv VARCHAR2
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_ERROR_MESSAGE
|
||||||
|
* @desc It returns string with error message for specified pCode (Error_Code).
|
||||||
|
* Error message is take from Errors Array loaded by INIT_ERRORS procedure
|
||||||
|
* @example select ENV_MANAGER.GET_ERROR_MESSAGE(pCode => -20009) from dual;
|
||||||
|
* @ex_rslt File not found on the cloud
|
||||||
|
**/
|
||||||
|
FUNCTION GET_ERROR_MESSAGE(
|
||||||
|
pCode PLS_INTEGER
|
||||||
|
) RETURN VARCHAR2;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_ERROR_STACK
|
||||||
|
* @desc It returns string with all possible error stack info.
|
||||||
|
* Error message is take from Errors Array loaded by INIT_ERRORS procedure
|
||||||
|
* @example
|
||||||
|
* select ENV_MANAGER.GET_ERROR_STACK(
|
||||||
|
* pFormat => 'OUTPUT'
|
||||||
|
* ,pCode => -20009
|
||||||
|
* ,pSourceFileReceivedKey => NULL)
|
||||||
|
* from dual
|
||||||
|
* @ex_rslt
|
||||||
|
* ------------------------------------------------------+
|
||||||
|
* Error Message:
|
||||||
|
* ORA-0000: normal, successful completion
|
||||||
|
* -------------------------------------------------------
|
||||||
|
* Error Stack:
|
||||||
|
* -------------------------------------------------------
|
||||||
|
* Error Backtrace:
|
||||||
|
* ------------------------------------------------------+
|
||||||
|
**/
|
||||||
|
FUNCTION GET_ERROR_STACK(
|
||||||
|
pFormat VARCHAR2
|
||||||
|
,pCode PLS_INTEGER
|
||||||
|
,pSourceFileReceivedKey CT_MRDS.A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY%TYPE DEFAULT NULL
|
||||||
|
) RETURN VARCHAR2;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name FORMAT_PARAMETERS
|
||||||
|
* @desc Formats parameter list for logging purposes.
|
||||||
|
* Converts SYS.ODCIVARCHAR2LIST to formatted string with proper NULL handling.
|
||||||
|
* @example select ENV_MANAGER.FORMAT_PARAMETERS(SYS.ODCIVARCHAR2LIST('param1=value1', 'param2=NULL')) from dual;
|
||||||
|
* @ex_rslt param1=value1 ,
|
||||||
|
* param2=NULL
|
||||||
|
**/
|
||||||
|
FUNCTION FORMAT_PARAMETERS(
|
||||||
|
pParameterList SYS.ODCIVARCHAR2LIST
|
||||||
|
) RETURN VARCHAR2;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name ANALYZE_VALIDATION_ERRORS
|
||||||
|
* @desc Analyzes CSV validation errors and generates detailed diagnostic report.
|
||||||
|
* Compares CSV structure with template table and provides specific error analysis.
|
||||||
|
* Includes suggested solutions for common validation issues.
|
||||||
|
* @param pValidationLogTable - Name of validation log table (e.g., VALIDATE$242_LOG)
|
||||||
|
* @param pTemplateSchema - Schema of template table (e.g., CT_ET_TEMPLATES)
|
||||||
|
* @param pTemplateTable - Name of template table (e.g., MOCK_PROC_TABLE)
|
||||||
|
* @param pCsvFileUri - URI of CSV file being validated
|
||||||
|
* @example SELECT ENV_MANAGER.ANALYZE_VALIDATION_ERRORS('VALIDATE$242_LOG', 'CT_ET_TEMPLATES', 'MOCK_PROC_TABLE', 'https://...') FROM DUAL;
|
||||||
|
* @ex_rslt Detailed validation analysis report with column mismatches and solutions
|
||||||
|
**/
|
||||||
|
FUNCTION ANALYZE_VALIDATION_ERRORS(
|
||||||
|
pValidationLogTable VARCHAR2,
|
||||||
|
pTemplateSchema VARCHAR2,
|
||||||
|
pTemplateTable VARCHAR2,
|
||||||
|
pCsvFileUri VARCHAR2
|
||||||
|
) RETURN VARCHAR2;
|
||||||
|
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
-- PACKAGE VERSION MANAGEMENT FUNCTIONS
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_VERSION
|
||||||
|
* @desc Returns the current version number of the ENV_MANAGER package.
|
||||||
|
* Uses semantic versioning format (MAJOR.MINOR.PATCH).
|
||||||
|
* @example SELECT ENV_MANAGER.GET_VERSION() FROM DUAL;
|
||||||
|
* @ex_rslt 3.0.0
|
||||||
|
**/
|
||||||
|
FUNCTION GET_VERSION RETURN VARCHAR2;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_BUILD_INFO
|
||||||
|
* @desc Returns comprehensive build information including version, build date, and author.
|
||||||
|
* Formatted for display in logs or monitoring systems.
|
||||||
|
* @example SELECT ENV_MANAGER.GET_BUILD_INFO() FROM DUAL;
|
||||||
|
* @ex_rslt Package: ENV_MANAGER
|
||||||
|
* Version: 3.0.0
|
||||||
|
* Build Date: 2025-10-22 16:00:00
|
||||||
|
* Author: Grzegorz Michalski
|
||||||
|
**/
|
||||||
|
FUNCTION GET_BUILD_INFO RETURN VARCHAR2;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_VERSION_HISTORY
|
||||||
|
* @desc Returns complete version history with all releases and changes.
|
||||||
|
* Shows evolution of package features over time.
|
||||||
|
* @example SELECT ENV_MANAGER.GET_VERSION_HISTORY() FROM DUAL;
|
||||||
|
* @ex_rslt ENV_MANAGER Version History:
|
||||||
|
* 3.0.0 (2025-10-22): Added package versioning system...
|
||||||
|
* 2.1.0 (2025-10-15): Added ANALYZE_VALIDATION_ERRORS function...
|
||||||
|
**/
|
||||||
|
FUNCTION GET_VERSION_HISTORY RETURN VARCHAR2;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_PACKAGE_VERSION_INFO
|
||||||
|
* @desc Universal function to get formatted version information for any package.
|
||||||
|
* This centralized function is used by all packages in the system.
|
||||||
|
* @param pPackageName - Name of the package
|
||||||
|
* @param pVersion - Version string (MAJOR.MINOR.PATCH format)
|
||||||
|
* @param pBuildDate - Build date timestamp
|
||||||
|
* @param pAuthor - Package author name
|
||||||
|
* @example SELECT ENV_MANAGER.GET_PACKAGE_VERSION_INFO('FILE_MANAGER', '2.1.0', '2025-10-22 15:00:00', 'Grzegorz Michalski') FROM DUAL;
|
||||||
|
* @ex_rslt Package: FILE_MANAGER
|
||||||
|
* Version: 2.1.0
|
||||||
|
* Build Date: 2025-10-22 15:00:00
|
||||||
|
* Author: Grzegorz Michalski
|
||||||
|
**/
|
||||||
|
FUNCTION GET_PACKAGE_VERSION_INFO(
|
||||||
|
pPackageName VARCHAR2,
|
||||||
|
pVersion VARCHAR2,
|
||||||
|
pBuildDate VARCHAR2,
|
||||||
|
pAuthor VARCHAR2
|
||||||
|
) RETURN VARCHAR2;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name FORMAT_VERSION_HISTORY
|
||||||
|
* @desc Universal function to format version history for any package.
|
||||||
|
* Adds package name header and proper formatting.
|
||||||
|
* @param pPackageName - Name of the package
|
||||||
|
* @param pVersionHistory - Complete version history text
|
||||||
|
* @example SELECT ENV_MANAGER.FORMAT_VERSION_HISTORY('FILE_MANAGER', '2.1.0 (2025-10-22): Export procedures...') FROM DUAL;
|
||||||
|
* @ex_rslt FILE_MANAGER Version History:
|
||||||
|
* 2.1.0 (2025-10-22): Export procedures...
|
||||||
|
**/
|
||||||
|
FUNCTION FORMAT_VERSION_HISTORY(
|
||||||
|
pPackageName VARCHAR2,
|
||||||
|
pVersionHistory VARCHAR2
|
||||||
|
) RETURN VARCHAR2;
|
||||||
|
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
-- PACKAGE HASH + CHANGE DETECTION FUNCTIONS
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name CALCULATE_PACKAGE_HASH
|
||||||
|
* @desc Calculates SHA256 hash of package source code from ALL_SOURCE.
|
||||||
|
* Returns hash for both SPEC and BODY (if exists).
|
||||||
|
* Used for automatic change detection.
|
||||||
|
* @param pPackageOwner - Schema owner of the package
|
||||||
|
* @param pPackageName - Name of the package
|
||||||
|
* @param pPackageType - Type of package code ('PACKAGE' for SPEC, 'PACKAGE BODY' for BODY)
|
||||||
|
* @example SELECT ENV_MANAGER.CALCULATE_PACKAGE_HASH('CT_MRDS', 'FILE_MANAGER', 'PACKAGE') FROM DUAL;
|
||||||
|
* @ex_rslt A7B3C5D9E8F1234567890ABCDEF... (64-character SHA256 hash)
|
||||||
|
**/
|
||||||
|
FUNCTION CALCULATE_PACKAGE_HASH(
|
||||||
|
pPackageOwner VARCHAR2,
|
||||||
|
pPackageName VARCHAR2,
|
||||||
|
pPackageType VARCHAR2 -- 'PACKAGE' or 'PACKAGE BODY'
|
||||||
|
) RETURN VARCHAR2;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name TRACK_PACKAGE_VERSION
|
||||||
|
* @desc Records package version and source code hash in A_PACKAGE_VERSION_TRACKING table.
|
||||||
|
* Automatically detects if source code changed without version update.
|
||||||
|
* Should be called after every package deployment.
|
||||||
|
* @param pPackageOwner - Schema owner of the package
|
||||||
|
* @param pPackageName - Name of the package
|
||||||
|
* @param pPackageVersion - Current version from PACKAGE_VERSION constant
|
||||||
|
* @param pPackageBuildDate - Build date from PACKAGE_BUILD_DATE constant
|
||||||
|
* @param pPackageAuthor - Author from PACKAGE_AUTHOR constant
|
||||||
|
* @example EXEC ENV_MANAGER.TRACK_PACKAGE_VERSION('CT_MRDS', 'FILE_MANAGER', '3.2.0', '2025-10-22 16:30:00', 'Grzegorz Michalski');
|
||||||
|
* @ex_rslt Record inserted into A_PACKAGE_VERSION_TRACKING with change detection status
|
||||||
|
**/
|
||||||
|
PROCEDURE TRACK_PACKAGE_VERSION(
|
||||||
|
pPackageOwner VARCHAR2,
|
||||||
|
pPackageName VARCHAR2,
|
||||||
|
pPackageVersion VARCHAR2,
|
||||||
|
pPackageBuildDate VARCHAR2,
|
||||||
|
pPackageAuthor VARCHAR2
|
||||||
|
);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name CHECK_PACKAGE_CHANGES
|
||||||
|
* @desc Checks if package source code has changed since last tracking.
|
||||||
|
* Compares current hash with last recorded hash in A_PACKAGE_VERSION_TRACKING.
|
||||||
|
* Returns detailed change detection report.
|
||||||
|
* @param pPackageOwner - Schema owner of the package
|
||||||
|
* @param pPackageName - Name of the package
|
||||||
|
* @example SELECT ENV_MANAGER.CHECK_PACKAGE_CHANGES('CT_MRDS', 'FILE_MANAGER') FROM DUAL;
|
||||||
|
* @ex_rslt WARNING: Package changed without version update!
|
||||||
|
* Last Version: 3.2.0
|
||||||
|
* Current Hash (SPEC): A7B3C5D9...
|
||||||
|
* Last Hash (SPEC): B8C4D6E0...
|
||||||
|
* RECOMMENDATION: Update PACKAGE_VERSION and PACKAGE_BUILD_DATE
|
||||||
|
**/
|
||||||
|
FUNCTION CHECK_PACKAGE_CHANGES(
|
||||||
|
pPackageOwner VARCHAR2,
|
||||||
|
pPackageName VARCHAR2
|
||||||
|
) RETURN VARCHAR2;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_PACKAGE_HASH_INFO
|
||||||
|
* @desc Returns formatted information about package hash and tracking history.
|
||||||
|
* Includes current hash, last tracked hash, and change detection status.
|
||||||
|
* @param pPackageOwner - Schema owner of the package
|
||||||
|
* @param pPackageName - Name of the package
|
||||||
|
* @example SELECT ENV_MANAGER.GET_PACKAGE_HASH_INFO('CT_MRDS', 'FILE_MANAGER') FROM DUAL;
|
||||||
|
* @ex_rslt Package: CT_MRDS.FILE_MANAGER
|
||||||
|
* Current Version: 3.2.0
|
||||||
|
* Current Hash (SPEC): A7B3C5D9...
|
||||||
|
* Last Tracked: 2025-10-22 16:30:00
|
||||||
|
* Status: OK - No changes detected
|
||||||
|
**/
|
||||||
|
FUNCTION GET_PACKAGE_HASH_INFO(
|
||||||
|
pPackageOwner VARCHAR2,
|
||||||
|
pPackageName VARCHAR2
|
||||||
|
) RETURN VARCHAR2;
|
||||||
|
|
||||||
|
END ENV_MANAGER;
|
||||||
|
/
|
||||||
@@ -1,122 +1,31 @@
|
|||||||
--=============================================================================================================================
|
--=============================================================================================================================
|
||||||
-- MARS-835: Export Group 1 - Split DATA + HIST (DEBT, DEBT_DAILY)
|
-- MARS-835: Export Group 1 - HIST Only (DEBT, DEBT_DAILY)
|
||||||
--=============================================================================================================================
|
--=============================================================================================================================
|
||||||
-- Purpose: Export last 6 months to DATA bucket (CSV), older data to HIST bucket (Parquet)
|
-- Purpose: Export ALL data to HIST bucket (Parquet with Hive-style partitioning)
|
||||||
-- Applies column mapping: A_ETL_LOAD_SET_FK to A_WORKFLOW_HISTORY_KEY
|
-- Applies column mapping: A_ETL_LOAD_SET_FK to A_WORKFLOW_HISTORY_KEY
|
||||||
-- Excludes legacy columns not required in new structure
|
-- Excludes legacy columns not required in new structure
|
||||||
-- USES: DATA_EXPORTER v2.4.0 with pTemplateTableName for column order and date formats
|
-- USES: DATA_EXPORTER v2.12.0 with pTemplateTableName for column order and date formats
|
||||||
-- Author: Grzegorz Michalski
|
-- Author: Grzegorz Michalski
|
||||||
-- Date: 2025-12-17
|
-- Date: 2025-12-17
|
||||||
-- Updated: 2026-01-11 (Updated to DATA_EXPORTER v2.4.0 with pTemplateTableName)
|
-- Updated: 2026-02-24 (Changed to HIST-only export, no DATA bucket split)
|
||||||
-- Related: MARS-835 - CSDB Data Export
|
-- Related: MARS-835 - CSDB Data Export
|
||||||
--=============================================================================================================================
|
--=============================================================================================================================
|
||||||
|
|
||||||
SET SERVEROUTPUT ON SIZE UNLIMITED
|
SET SERVEROUTPUT ON SIZE UNLIMITED
|
||||||
SET TIMING ON
|
SET TIMING ON
|
||||||
|
|
||||||
DEFINE cutoff_date = "TRUNC(ADD_MONTHS(SYSDATE, -6), 'MM')"
|
|
||||||
|
|
||||||
PROMPT ========================================================================
|
PROMPT ========================================================================
|
||||||
PROMPT Exporting CSDB.DEBT - Split DATA + HIST
|
PROMPT Exporting CSDB.DEBT - HIST Only
|
||||||
PROMPT ========================================================================
|
PROMPT ========================================================================
|
||||||
PROMPT Last 6 months to DATA bucket (CSV format)
|
PROMPT ALL data to HIST bucket (Parquet with Hive-style partitioning)
|
||||||
PROMPT Older data to HIST bucket (Parquet with partitioning)
|
|
||||||
PROMPT Column mapping: A_ETL_LOAD_SET_FK to A_WORKFLOW_HISTORY_KEY
|
PROMPT Column mapping: A_ETL_LOAD_SET_FK to A_WORKFLOW_HISTORY_KEY
|
||||||
PROMPT Excluded columns: IDIRDEPOSITORY, VA_BONDDURATION
|
PROMPT Excluded columns: IDIRDEPOSITORY, VA_BONDDURATION
|
||||||
PROMPT ========================================================================
|
PROMPT ========================================================================
|
||||||
|
|
||||||
-- PRE-EXPORT CHECK: List existing files and count records
|
-- Export ALL data to HIST bucket (Parquet)
|
||||||
DECLARE
|
-- NEW v2.12.0: Per-column date format handling with template table, full data range
|
||||||
vFileCount NUMBER := 0;
|
|
||||||
vRecordCount NUMBER := 0;
|
|
||||||
vLocationUri VARCHAR2(1000);
|
|
||||||
BEGIN
|
BEGIN
|
||||||
-- Get bucket URI for DATA bucket
|
DBMS_OUTPUT.PUT_LINE('Exporting LEGACY_DEBT data to HIST bucket (ALL data)...');
|
||||||
vLocationUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('DATA') || 'ODS/CSDB/CSDB_DEBT/';
|
|
||||||
|
|
||||||
-- Count existing files
|
|
||||||
SELECT COUNT(*)
|
|
||||||
INTO vFileCount
|
|
||||||
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
|
||||||
credential_name => 'OCI$RESOURCE_PRINCIPAL',
|
|
||||||
location_uri => vLocationUri
|
|
||||||
))
|
|
||||||
WHERE object_name NOT LIKE '%/'; -- Exclude directories
|
|
||||||
|
|
||||||
IF vFileCount > 0 THEN
|
|
||||||
DBMS_OUTPUT.PUT_LINE('===============================================================================');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('PRE-EXPORT CHECK: Files already exist in DATA bucket');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('===============================================================================');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('Location: ' || vLocationUri);
|
|
||||||
DBMS_OUTPUT.PUT_LINE('Files found: ' || vFileCount);
|
|
||||||
DBMS_OUTPUT.PUT_LINE('');
|
|
||||||
|
|
||||||
-- List existing files
|
|
||||||
DBMS_OUTPUT.PUT_LINE('Existing files:');
|
|
||||||
FOR rec IN (
|
|
||||||
SELECT object_name, bytes, TO_CHAR(last_modified, 'YYYY-MM-DD HH24:MI:SS') AS modified
|
|
||||||
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
|
||||||
credential_name => 'OCI$RESOURCE_PRINCIPAL',
|
|
||||||
location_uri => vLocationUri
|
|
||||||
))
|
|
||||||
WHERE object_name NOT LIKE '%/'
|
|
||||||
ORDER BY object_name
|
|
||||||
) LOOP
|
|
||||||
DBMS_OUTPUT.PUT_LINE(' - ' || rec.object_name || ' (' || rec.bytes || ' bytes, ' || rec.modified || ')');
|
|
||||||
END LOOP;
|
|
||||||
|
|
||||||
-- Count records in external table
|
|
||||||
BEGIN
|
|
||||||
EXECUTE IMMEDIATE 'SELECT COUNT(*) FROM ODS.CSDB_DEBT_ODS' INTO vRecordCount;
|
|
||||||
DBMS_OUTPUT.PUT_LINE('');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('-------------------------------------------------------------------------------');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('>>>');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('>>> Records currently readable via external table: ' || vRecordCount);
|
|
||||||
DBMS_OUTPUT.PUT_LINE('>>>');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('-------------------------------------------------------------------------------');
|
|
||||||
EXCEPTION
|
|
||||||
WHEN OTHERS THEN
|
|
||||||
DBMS_OUTPUT.PUT_LINE('');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('WARNING: Cannot count records in external table');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('Error: ' || SQLERRM);
|
|
||||||
END;
|
|
||||||
|
|
||||||
DBMS_OUTPUT.PUT_LINE('===============================================================================');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('');
|
|
||||||
ELSE
|
|
||||||
DBMS_OUTPUT.PUT_LINE('PRE-EXPORT CHECK: No existing files found in DATA bucket - bucket is clean');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('');
|
|
||||||
END IF;
|
|
||||||
END;
|
|
||||||
/
|
|
||||||
|
|
||||||
-- Export recent data to DATA bucket (CSV)
|
|
||||||
-- NEW v2.4.0: Per-column date format handling with template table for column order
|
|
||||||
BEGIN
|
|
||||||
DBMS_OUTPUT.PUT_LINE('Exporting LEGACY_DEBT data to DATA bucket (last 6 months)...');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('Using Template Table: CT_ET_TEMPLATES.CSDB_DEBT');
|
|
||||||
|
|
||||||
CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
|
|
||||||
pSchemaName => 'OU_CSDB',
|
|
||||||
pTableName => 'LEGACY_DEBT',
|
|
||||||
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
|
|
||||||
pBucketArea => 'DATA',
|
|
||||||
pFolderName => 'ODS/CSDB/CSDB_DEBT',
|
|
||||||
pMinDate => &cutoff_date,
|
|
||||||
pMaxDate => SYSDATE,
|
|
||||||
pParallelDegree => 16,
|
|
||||||
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_DEBT',
|
|
||||||
pMaxFileSize => 104857600 -- 100MB in bytes (safe for parallel execution, avoids ORA-04036)
|
|
||||||
);
|
|
||||||
|
|
||||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_DEBT exported to DATA bucket with template column order');
|
|
||||||
END;
|
|
||||||
/
|
|
||||||
|
|
||||||
-- Export historical data to HIST bucket (Parquet)
|
|
||||||
-- NEW v2.4.0: Per-column date format handling with template table
|
|
||||||
BEGIN
|
|
||||||
DBMS_OUTPUT.PUT_LINE('Exporting LEGACY_DEBT data to HIST bucket (older than 6 months)...');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('Using Template Table: CT_ET_TEMPLATES.CSDB_DEBT');
|
DBMS_OUTPUT.PUT_LINE('Using Template Table: CT_ET_TEMPLATES.CSDB_DEBT');
|
||||||
|
|
||||||
CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
|
CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
|
||||||
@@ -125,9 +34,11 @@ BEGIN
|
|||||||
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
|
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
|
||||||
pBucketArea => 'ARCHIVE',
|
pBucketArea => 'ARCHIVE',
|
||||||
pFolderName => 'ARCHIVE/CSDB/CSDB_DEBT',
|
pFolderName => 'ARCHIVE/CSDB/CSDB_DEBT',
|
||||||
pMaxDate => &cutoff_date,
|
pMinDate => DATE '1900-01-01', -- Include all historical data
|
||||||
|
pMaxDate => DATE '9999-12-31', -- Include all future dates
|
||||||
pParallelDegree => 16,
|
pParallelDegree => 16,
|
||||||
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_DEBT'
|
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_DEBT',
|
||||||
|
pJobClass => 'high' -- Oracle Scheduler job class for resource management
|
||||||
);
|
);
|
||||||
|
|
||||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_DEBT exported to HIST bucket with template column order');
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_DEBT exported to HIST bucket with template column order');
|
||||||
@@ -135,107 +46,18 @@ END;
|
|||||||
/
|
/
|
||||||
|
|
||||||
PROMPT ========================================================================
|
PROMPT ========================================================================
|
||||||
PROMPT Exporting CSDB.LEGACY_DEBT_DAILY - Split DATA + HIST
|
PROMPT Exporting CSDB.LEGACY_DEBT_DAILY - HIST Only
|
||||||
PROMPT ========================================================================
|
PROMPT ========================================================================
|
||||||
PROMPT Last 6 months to DATA bucket (CSV format)
|
PROMPT ALL data to HIST bucket (Parquet with Hive-style partitioning)
|
||||||
PROMPT Older data to HIST bucket (Parquet with partitioning)
|
|
||||||
PROMPT Column mapping: A_ETL_LOAD_SET_FK to A_WORKFLOW_HISTORY_KEY
|
PROMPT Column mapping: A_ETL_LOAD_SET_FK to A_WORKFLOW_HISTORY_KEY
|
||||||
PROMPT Excluded columns: STEPID, PROGRAMNAME, PROGRAMCEILING, PROGRAMSTATUS,
|
PROMPT Excluded columns: STEPID, PROGRAMNAME, PROGRAMCEILING, PROGRAMSTATUS,
|
||||||
PROMPT ISSUERNACE21SECTOR, INSTRUMENTQUOTATIONBASIS
|
PROMPT ISSUERNACE21SECTOR, INSTRUMENTQUOTATIONBASIS
|
||||||
PROMPT ========================================================================
|
PROMPT ========================================================================
|
||||||
|
|
||||||
-- PRE-EXPORT CHECK: List existing files and count records
|
-- Export ALL data to HIST bucket (Parquet)
|
||||||
DECLARE
|
-- NEW v2.12.0: Per-column date format handling with template table, full data range
|
||||||
vFileCount NUMBER := 0;
|
|
||||||
vRecordCount NUMBER := 0;
|
|
||||||
vLocationUri VARCHAR2(1000);
|
|
||||||
BEGIN
|
BEGIN
|
||||||
-- Get bucket URI for DATA bucket
|
DBMS_OUTPUT.PUT_LINE('Exporting LEGACY_DEBT_DAILY data to HIST bucket (ALL data)...');
|
||||||
vLocationUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('DATA') || 'ODS/CSDB/CSDB_DEBT_DAILY/';
|
|
||||||
|
|
||||||
-- Count existing files
|
|
||||||
SELECT COUNT(*)
|
|
||||||
INTO vFileCount
|
|
||||||
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
|
||||||
credential_name => 'OCI$RESOURCE_PRINCIPAL',
|
|
||||||
location_uri => vLocationUri
|
|
||||||
))
|
|
||||||
WHERE object_name NOT LIKE '%/'; -- Exclude directories
|
|
||||||
|
|
||||||
IF vFileCount > 0 THEN
|
|
||||||
DBMS_OUTPUT.PUT_LINE('===============================================================================');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('PRE-EXPORT CHECK: Files already exist in DATA bucket');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('===============================================================================');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('Location: ' || vLocationUri);
|
|
||||||
DBMS_OUTPUT.PUT_LINE('Files found: ' || vFileCount);
|
|
||||||
DBMS_OUTPUT.PUT_LINE('');
|
|
||||||
|
|
||||||
-- List existing files
|
|
||||||
DBMS_OUTPUT.PUT_LINE('Existing files:');
|
|
||||||
FOR rec IN (
|
|
||||||
SELECT object_name, bytes, TO_CHAR(last_modified, 'YYYY-MM-DD HH24:MI:SS') AS modified
|
|
||||||
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
|
||||||
credential_name => 'OCI$RESOURCE_PRINCIPAL',
|
|
||||||
location_uri => vLocationUri
|
|
||||||
))
|
|
||||||
WHERE object_name NOT LIKE '%/'
|
|
||||||
ORDER BY object_name
|
|
||||||
) LOOP
|
|
||||||
DBMS_OUTPUT.PUT_LINE(' - ' || rec.object_name || ' (' || rec.bytes || ' bytes, ' || rec.modified || ')');
|
|
||||||
END LOOP;
|
|
||||||
|
|
||||||
-- Count records in external table
|
|
||||||
BEGIN
|
|
||||||
EXECUTE IMMEDIATE 'SELECT COUNT(*) FROM ODS.CSDB_DEBT_DAILY_ODS' INTO vRecordCount;
|
|
||||||
DBMS_OUTPUT.PUT_LINE('');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('-------------------------------------------------------------------------------');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('>>>');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('>>> Records currently readable via external table: ' || vRecordCount);
|
|
||||||
DBMS_OUTPUT.PUT_LINE('>>>');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('-------------------------------------------------------------------------------');
|
|
||||||
EXCEPTION
|
|
||||||
WHEN OTHERS THEN
|
|
||||||
DBMS_OUTPUT.PUT_LINE('');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('WARNING: Cannot count records in external table');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('Error: ' || SQLERRM);
|
|
||||||
END;
|
|
||||||
|
|
||||||
DBMS_OUTPUT.PUT_LINE('===============================================================================');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('');
|
|
||||||
ELSE
|
|
||||||
DBMS_OUTPUT.PUT_LINE('PRE-EXPORT CHECK: No existing files found in DATA bucket - bucket is clean');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('');
|
|
||||||
END IF;
|
|
||||||
END;
|
|
||||||
/
|
|
||||||
|
|
||||||
-- Export recent data to DATA bucket (CSV)
|
|
||||||
-- NEW v2.4.0: Per-column date format handling with template table for column order
|
|
||||||
BEGIN
|
|
||||||
DBMS_OUTPUT.PUT_LINE('Exporting LEGACY_DEBT_DAILY data to DATA bucket (last 6 months)...');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('Using Template Table: CT_ET_TEMPLATES.CSDB_DEBT_DAILY');
|
|
||||||
|
|
||||||
CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
|
|
||||||
pSchemaName => 'OU_CSDB',
|
|
||||||
pTableName => 'LEGACY_DEBT_DAILY',
|
|
||||||
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
|
|
||||||
pBucketArea => 'DATA',
|
|
||||||
pFolderName => 'ODS/CSDB/CSDB_DEBT_DAILY',
|
|
||||||
pMinDate => &cutoff_date,
|
|
||||||
pMaxDate => SYSDATE,
|
|
||||||
pParallelDegree => 16,
|
|
||||||
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_DEBT_DAILY',
|
|
||||||
pMaxFileSize => 104857600 -- 100MB in bytes (safe for parallel execution, avoids ORA-04036)
|
|
||||||
);
|
|
||||||
|
|
||||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_DEBT_DAILY exported to DATA bucket with template column order');
|
|
||||||
END;
|
|
||||||
/
|
|
||||||
|
|
||||||
-- Export historical data to HIST bucket (Parquet)
|
|
||||||
-- NEW v2.4.0: Per-column date format handling with template table
|
|
||||||
BEGIN
|
|
||||||
DBMS_OUTPUT.PUT_LINE('Exporting LEGACY_DEBT_DAILY data to HIST bucket (older than 6 months)...');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('Using Template Table: CT_ET_TEMPLATES.CSDB_DEBT_DAILY');
|
DBMS_OUTPUT.PUT_LINE('Using Template Table: CT_ET_TEMPLATES.CSDB_DEBT_DAILY');
|
||||||
|
|
||||||
CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
|
CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
|
||||||
@@ -244,9 +66,11 @@ BEGIN
|
|||||||
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
|
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
|
||||||
pBucketArea => 'ARCHIVE',
|
pBucketArea => 'ARCHIVE',
|
||||||
pFolderName => 'ARCHIVE/CSDB/CSDB_DEBT_DAILY',
|
pFolderName => 'ARCHIVE/CSDB/CSDB_DEBT_DAILY',
|
||||||
pMaxDate => &cutoff_date,
|
pMinDate => DATE '1900-01-01', -- Include all historical data
|
||||||
|
pMaxDate => DATE '9999-12-31', -- Include all future dates
|
||||||
pParallelDegree => 16,
|
pParallelDegree => 16,
|
||||||
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_DEBT_DAILY'
|
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_DEBT_DAILY',
|
||||||
|
pJobClass => 'high' -- Oracle Scheduler job class for resource management
|
||||||
);
|
);
|
||||||
|
|
||||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_DEBT_DAILY exported to HIST bucket with template column order');
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_DEBT_DAILY exported to HIST bucket with template column order');
|
||||||
@@ -256,8 +80,8 @@ END;
|
|||||||
PROMPT ========================================================================
|
PROMPT ========================================================================
|
||||||
PROMPT Group 1 Export Completed
|
PROMPT Group 1 Export Completed
|
||||||
PROMPT ========================================================================
|
PROMPT ========================================================================
|
||||||
PROMPT - LEGACY_DEBT: DATA + HIST exported
|
PROMPT - LEGACY_DEBT: HIST exported (ALL data)
|
||||||
PROMPT - LEGACY_DEBT_DAILY: DATA + HIST exported
|
PROMPT - LEGACY_DEBT_DAILY: HIST exported (ALL data)
|
||||||
PROMPT ========================================================================
|
PROMPT ========================================================================
|
||||||
|
|
||||||
--=============================================================================================================================
|
--=============================================================================================================================
|
||||||
|
|||||||
@@ -33,9 +33,11 @@ BEGIN
|
|||||||
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
|
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
|
||||||
pBucketArea => 'ARCHIVE',
|
pBucketArea => 'ARCHIVE',
|
||||||
pFolderName => 'ARCHIVE/CSDB/CSDB_INSTR_RAT_FULL',
|
pFolderName => 'ARCHIVE/CSDB/CSDB_INSTR_RAT_FULL',
|
||||||
pMaxDate => SYSDATE,
|
pMinDate => DATE '1900-01-01', -- Explicit start date for clarity
|
||||||
|
pMaxDate => DATE '9999-12-31', -- Include future dates (MAX_LOAD_START can be beyond SYSDATE)
|
||||||
pParallelDegree => 8,
|
pParallelDegree => 8,
|
||||||
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_INSTR_RAT_FULL'
|
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_INSTR_RAT_FULL',
|
||||||
|
pJobClass => 'high' -- Oracle Scheduler job class for resource management
|
||||||
);
|
);
|
||||||
|
|
||||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_INSTR_RAT_FULL exported to HIST bucket with template column order');
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_INSTR_RAT_FULL exported to HIST bucket with template column order');
|
||||||
@@ -60,9 +62,11 @@ BEGIN
|
|||||||
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
|
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
|
||||||
pBucketArea => 'ARCHIVE',
|
pBucketArea => 'ARCHIVE',
|
||||||
pFolderName => 'ARCHIVE/CSDB/CSDB_INSTR_DESC_FULL',
|
pFolderName => 'ARCHIVE/CSDB/CSDB_INSTR_DESC_FULL',
|
||||||
pMaxDate => SYSDATE,
|
pMinDate => DATE '1900-01-01', -- Explicit start date for clarity
|
||||||
|
pMaxDate => DATE '9999-12-31', -- Include future dates (MAX_LOAD_START can be beyond SYSDATE)
|
||||||
pParallelDegree => 8,
|
pParallelDegree => 8,
|
||||||
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_INSTR_DESC_FULL'
|
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_INSTR_DESC_FULL',
|
||||||
|
pJobClass => 'high' -- Oracle Scheduler job class for resource management
|
||||||
);
|
);
|
||||||
|
|
||||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_INSTR_DESC_FULL exported to HIST bucket with template column order');
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_INSTR_DESC_FULL exported to HIST bucket with template column order');
|
||||||
@@ -87,9 +91,11 @@ BEGIN
|
|||||||
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
|
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
|
||||||
pBucketArea => 'ARCHIVE',
|
pBucketArea => 'ARCHIVE',
|
||||||
pFolderName => 'ARCHIVE/CSDB/CSDB_ISSUER_RAT_FULL',
|
pFolderName => 'ARCHIVE/CSDB/CSDB_ISSUER_RAT_FULL',
|
||||||
pMaxDate => SYSDATE,
|
pMinDate => DATE '1900-01-01', -- Explicit start date for clarity
|
||||||
|
pMaxDate => DATE '9999-12-31', -- Include future dates (MAX_LOAD_START can be beyond SYSDATE)
|
||||||
pParallelDegree => 8,
|
pParallelDegree => 8,
|
||||||
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_ISSUER_RAT_FULL'
|
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_ISSUER_RAT_FULL',
|
||||||
|
pJobClass => 'high' -- Oracle Scheduler job class for resource management
|
||||||
);
|
);
|
||||||
|
|
||||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_ISSUER_RAT_FULL exported to HIST bucket with template column order');
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_ISSUER_RAT_FULL exported to HIST bucket with template column order');
|
||||||
@@ -114,9 +120,11 @@ BEGIN
|
|||||||
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
|
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
|
||||||
pBucketArea => 'ARCHIVE',
|
pBucketArea => 'ARCHIVE',
|
||||||
pFolderName => 'ARCHIVE/CSDB/CSDB_ISSUER_DESC_FULL',
|
pFolderName => 'ARCHIVE/CSDB/CSDB_ISSUER_DESC_FULL',
|
||||||
pMaxDate => SYSDATE,
|
pMinDate => DATE '1900-01-01', -- Explicit start date for clarity
|
||||||
|
pMaxDate => DATE '9999-12-31', -- Include future dates (MAX_LOAD_START can be beyond SYSDATE)
|
||||||
pParallelDegree => 8,
|
pParallelDegree => 8,
|
||||||
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_ISSUER_DESC_FULL'
|
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_ISSUER_DESC_FULL',
|
||||||
|
pJobClass => 'high' -- Oracle Scheduler job class for resource management
|
||||||
);
|
);
|
||||||
|
|
||||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_ISSUER_DESC_FULL exported to HIST bucket with template column order');
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_ISSUER_DESC_FULL exported to HIST bucket with template column order');
|
||||||
|
|||||||
@@ -1,10 +1,11 @@
|
|||||||
-- =====================================================================================
|
-- =====================================================================================
|
||||||
-- Script: 03_MARS_835_verify_exports.sql
|
-- Script: 03_MARS_835_verify_exports.sql
|
||||||
-- Purpose: Verify exported files exist in DATA and HIST buckets after export
|
-- Purpose: Verify exported files exist in HIST bucket after export (HIST-only strategy)
|
||||||
-- Author: Grzegorz Michalski
|
-- Author: Grzegorz Michalski
|
||||||
-- Created: 2025-12-17
|
-- Created: 2025-12-17
|
||||||
|
-- Updated: 2026-02-24 (Changed to HIST-only verification)
|
||||||
-- MARS Issue: MARS-835
|
-- MARS Issue: MARS-835
|
||||||
-- Target Locations: mrds_data_dev/ODS/CSDB/, mrds_hist_dev/ARCHIVE/CSDB/
|
-- Target Locations: mrds_hist_dev/ARCHIVE/CSDB/
|
||||||
-- =====================================================================================
|
-- =====================================================================================
|
||||||
|
|
||||||
SET SERVEROUTPUT ON SIZE UNLIMITED;
|
SET SERVEROUTPUT ON SIZE UNLIMITED;
|
||||||
@@ -13,17 +14,14 @@ SET VERIFY OFF;
|
|||||||
SET LINESIZE 200;
|
SET LINESIZE 200;
|
||||||
|
|
||||||
PROMPT =====================================================================================
|
PROMPT =====================================================================================
|
||||||
PROMPT MARS-835 Verification: Listing exported files in DATA and HIST buckets
|
PROMPT MARS-835 Verification: Listing exported files in HIST bucket (HIST-only strategy)
|
||||||
PROMPT =====================================================================================
|
PROMPT =====================================================================================
|
||||||
|
|
||||||
DECLARE
|
DECLARE
|
||||||
vDataBucketUri VARCHAR2(500);
|
|
||||||
vHistBucketUri VARCHAR2(500);
|
vHistBucketUri VARCHAR2(500);
|
||||||
vCredentialName VARCHAR2(100);
|
vCredentialName VARCHAR2(100);
|
||||||
vFileCount NUMBER := 0;
|
vFileCount NUMBER := 0;
|
||||||
vTotalDataFiles NUMBER := 0;
|
|
||||||
vTotalHistFiles NUMBER := 0;
|
vTotalHistFiles NUMBER := 0;
|
||||||
vTotalDataSize NUMBER := 0;
|
|
||||||
vTotalHistSize NUMBER := 0;
|
vTotalHistSize NUMBER := 0;
|
||||||
|
|
||||||
TYPE t_folder_info IS RECORD (
|
TYPE t_folder_info IS RECORD (
|
||||||
@@ -33,25 +31,18 @@ DECLARE
|
|||||||
);
|
);
|
||||||
TYPE t_folder_list IS TABLE OF t_folder_info;
|
TYPE t_folder_list IS TABLE OF t_folder_info;
|
||||||
|
|
||||||
vDataFolders t_folder_list;
|
|
||||||
vHistFolders t_folder_list;
|
vHistFolders t_folder_list;
|
||||||
BEGIN
|
BEGIN
|
||||||
-- Get bucket URIs and credential from FILE_MANAGER
|
-- Get bucket URI and credential from FILE_MANAGER
|
||||||
vDataBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('DATA');
|
|
||||||
vHistBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
|
vHistBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
|
||||||
vCredentialName := CT_MRDS.ENV_MANAGER.gvCredentialName;
|
vCredentialName := CT_MRDS.ENV_MANAGER.gvCredentialName;
|
||||||
|
|
||||||
DBMS_OUTPUT.PUT_LINE('VERIFICATION TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
|
DBMS_OUTPUT.PUT_LINE('VERIFICATION TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
|
||||||
DBMS_OUTPUT.PUT_LINE('DATA Bucket URI: ' || vDataBucketUri);
|
|
||||||
DBMS_OUTPUT.PUT_LINE('HIST Bucket URI: ' || vHistBucketUri);
|
DBMS_OUTPUT.PUT_LINE('HIST Bucket URI: ' || vHistBucketUri);
|
||||||
DBMS_OUTPUT.PUT_LINE('');
|
DBMS_OUTPUT.PUT_LINE('');
|
||||||
|
|
||||||
-- Initialize folder lists
|
-- Initialize folder list (all tables in HIST)
|
||||||
vDataFolders := t_folder_list(
|
-- Initialize folder list (all 6 tables in HIST)
|
||||||
t_folder_info('ODS/CSDB/CSDB_DEBT/', 'DEBT', 'CSV'),
|
|
||||||
t_folder_info('ODS/CSDB/CSDB_DEBT_DAILY/', 'DEBT_DAILY', 'CSV')
|
|
||||||
);
|
|
||||||
|
|
||||||
vHistFolders := t_folder_list(
|
vHistFolders := t_folder_list(
|
||||||
t_folder_info('ARCHIVE/CSDB/CSDB_DEBT/', 'DEBT', 'Parquet'),
|
t_folder_info('ARCHIVE/CSDB/CSDB_DEBT/', 'DEBT', 'Parquet'),
|
||||||
t_folder_info('ARCHIVE/CSDB/CSDB_DEBT_DAILY/', 'DEBT_DAILY', 'Parquet'),
|
t_folder_info('ARCHIVE/CSDB/CSDB_DEBT_DAILY/', 'DEBT_DAILY', 'Parquet'),
|
||||||
@@ -62,49 +53,7 @@ BEGIN
|
|||||||
);
|
);
|
||||||
|
|
||||||
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
|
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
|
||||||
DBMS_OUTPUT.PUT_LINE('Checking DATA Bucket Exports (CSV format - last 6 months)');
|
DBMS_OUTPUT.PUT_LINE('Checking HIST Bucket Exports (Parquet with Hive partitioning - ALL data)');
|
||||||
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
|
|
||||||
|
|
||||||
-- Check DATA bucket exports
|
|
||||||
FOR i IN 1..vDataFolders.COUNT LOOP
|
|
||||||
vFileCount := 0;
|
|
||||||
|
|
||||||
DBMS_OUTPUT.PUT_LINE('');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('Table: ' || vDataFolders(i).table_name || ' (' || vDataFolders(i).expected_format || ')');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('Folder: ' || vDataFolders(i).folder_name);
|
|
||||||
DBMS_OUTPUT.PUT_LINE('-------------------------------------------------------------------------------------');
|
|
||||||
|
|
||||||
BEGIN
|
|
||||||
FOR rec IN (
|
|
||||||
SELECT object_name, bytes, TO_CHAR(created, 'YYYY-MM-DD HH24:MI:SS') AS created_date
|
|
||||||
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
|
||||||
credential_name => vCredentialName,
|
|
||||||
location_uri => vDataBucketUri || vDataFolders(i).folder_name
|
|
||||||
))
|
|
||||||
WHERE object_name LIKE '%.csv'
|
|
||||||
ORDER BY created DESC
|
|
||||||
) LOOP
|
|
||||||
vFileCount := vFileCount + 1;
|
|
||||||
vTotalDataFiles := vTotalDataFiles + 1;
|
|
||||||
vTotalDataSize := vTotalDataSize + rec.bytes;
|
|
||||||
DBMS_OUTPUT.PUT_LINE(' [' || vFileCount || '] ' || rec.object_name ||
|
|
||||||
' (' || ROUND(rec.bytes/1024/1024, 2) || ' MB) - ' || rec.created_date);
|
|
||||||
END LOOP;
|
|
||||||
|
|
||||||
IF vFileCount = 0 THEN
|
|
||||||
DBMS_OUTPUT.PUT_LINE(' [ERROR] No CSV files found - Export may have failed!');
|
|
||||||
ELSE
|
|
||||||
DBMS_OUTPUT.PUT_LINE(' [SUCCESS] Found ' || vFileCount || ' CSV file(s)');
|
|
||||||
END IF;
|
|
||||||
EXCEPTION
|
|
||||||
WHEN OTHERS THEN
|
|
||||||
DBMS_OUTPUT.PUT_LINE(' [ERROR] Cannot access folder - ' || SQLERRM);
|
|
||||||
END;
|
|
||||||
END LOOP;
|
|
||||||
|
|
||||||
DBMS_OUTPUT.PUT_LINE('');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('Checking HIST Bucket Exports (Parquet with Hive partitioning)');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
|
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
|
||||||
|
|
||||||
-- Check HIST bucket exports
|
-- Check HIST bucket exports
|
||||||
@@ -155,24 +104,19 @@ BEGIN
|
|||||||
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
|
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
|
||||||
DBMS_OUTPUT.PUT_LINE('Export Verification Summary');
|
DBMS_OUTPUT.PUT_LINE('Export Verification Summary');
|
||||||
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
|
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
|
||||||
DBMS_OUTPUT.PUT_LINE('DATA Bucket (CSV):');
|
DBMS_OUTPUT.PUT_LINE('HIST Bucket (Parquet - HIST-only strategy):');
|
||||||
DBMS_OUTPUT.PUT_LINE(' - Total files: ' || vTotalDataFiles);
|
|
||||||
DBMS_OUTPUT.PUT_LINE(' - Total size: ' || ROUND(vTotalDataSize/1024/1024/1024, 2) || ' GB');
|
|
||||||
DBMS_OUTPUT.PUT_LINE(' - Expected tables: 2 (DEBT, DEBT_DAILY - last 6 months)');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('HIST Bucket (Parquet):');
|
|
||||||
DBMS_OUTPUT.PUT_LINE(' - Total files: ' || vTotalHistFiles || '+');
|
DBMS_OUTPUT.PUT_LINE(' - Total files: ' || vTotalHistFiles || '+');
|
||||||
DBMS_OUTPUT.PUT_LINE(' - Total size: ' || ROUND(vTotalHistSize/1024/1024/1024, 2) || '+ GB (sample)');
|
DBMS_OUTPUT.PUT_LINE(' - Total size: ' || ROUND(vTotalHistSize/1024/1024/1024, 2) || '+ GB (sample)');
|
||||||
DBMS_OUTPUT.PUT_LINE(' - Expected tables: 6 (all CSDB tables with historical data)');
|
DBMS_OUTPUT.PUT_LINE(' - Expected tables: 6 (all CSDB tables exported to HIST)');
|
||||||
DBMS_OUTPUT.PUT_LINE('');
|
DBMS_OUTPUT.PUT_LINE('');
|
||||||
|
|
||||||
IF vTotalDataFiles >= 2 AND vTotalHistFiles >= 6 THEN
|
IF vTotalHistFiles >= 6 THEN
|
||||||
DBMS_OUTPUT.PUT_LINE('[SUCCESS] OVERALL STATUS: Export appears SUCCESSFUL');
|
DBMS_OUTPUT.PUT_LINE('[SUCCESS] OVERALL STATUS: Export appears SUCCESSFUL');
|
||||||
DBMS_OUTPUT.PUT_LINE(' Files found in both DATA and HIST buckets');
|
DBMS_OUTPUT.PUT_LINE(' Files found in HIST bucket for all tables');
|
||||||
DBMS_OUTPUT.PUT_LINE(' Proceed to record count verification (Step 4)');
|
DBMS_OUTPUT.PUT_LINE(' Proceed to record count verification (Step 4)');
|
||||||
ELSIF vTotalDataFiles = 0 AND vTotalHistFiles = 0 THEN
|
ELSIF vTotalHistFiles = 0 THEN
|
||||||
DBMS_OUTPUT.PUT_LINE('[FAILED] OVERALL STATUS: Export FAILED');
|
DBMS_OUTPUT.PUT_LINE('[FAILED] OVERALL STATUS: Export FAILED');
|
||||||
DBMS_OUTPUT.PUT_LINE(' No files found in either bucket');
|
DBMS_OUTPUT.PUT_LINE(' No files found in HIST bucket');
|
||||||
DBMS_OUTPUT.PUT_LINE(' Review export logs for errors');
|
DBMS_OUTPUT.PUT_LINE(' Review export logs for errors');
|
||||||
ELSE
|
ELSE
|
||||||
DBMS_OUTPUT.PUT_LINE('[WARNING] OVERALL STATUS: Partial export detected');
|
DBMS_OUTPUT.PUT_LINE('[WARNING] OVERALL STATUS: Partial export detected');
|
||||||
|
|||||||
@@ -1,10 +1,11 @@
|
|||||||
-- =====================================================================================
|
-- =====================================================================================
|
||||||
-- Script: 04_MARS_835_verify_record_counts.sql
|
-- Script: 04_MARS_835_verify_record_counts.sql
|
||||||
-- Purpose: Verify record counts match between source tables and exported data
|
-- Purpose: Verify record counts match between source tables and exported data (HIST-only)
|
||||||
-- Author: Grzegorz Michalski
|
-- Author: Grzegorz Michalski
|
||||||
-- Created: 2025-12-17
|
-- Created: 2025-12-17
|
||||||
|
-- Updated: 2026-02-24 (Changed to HIST-only verification)
|
||||||
-- MARS Issue: MARS-835
|
-- MARS Issue: MARS-835
|
||||||
-- Verification: Compare OU_CSDB source tables with ODS external tables
|
-- Verification: Compare OU_CSDB source tables with ODS external tables (HIST only)
|
||||||
-- =====================================================================================
|
-- =====================================================================================
|
||||||
|
|
||||||
SET SERVEROUTPUT ON SIZE UNLIMITED;
|
SET SERVEROUTPUT ON SIZE UNLIMITED;
|
||||||
@@ -13,28 +14,23 @@ SET VERIFY OFF;
|
|||||||
SET LINESIZE 200;
|
SET LINESIZE 200;
|
||||||
|
|
||||||
PROMPT =====================================================================================
|
PROMPT =====================================================================================
|
||||||
PROMPT MARS-835 Record Count Verification
|
PROMPT MARS-835 Record Count Verification (HIST-only strategy)
|
||||||
PROMPT =====================================================================================
|
PROMPT =====================================================================================
|
||||||
PROMPT Comparing source table counts with exported external table counts
|
PROMPT Comparing source table counts with HIST external table counts
|
||||||
PROMPT =====================================================================================
|
PROMPT =====================================================================================
|
||||||
|
|
||||||
DECLARE
|
DECLARE
|
||||||
TYPE t_table_info IS RECORD (
|
TYPE t_table_info IS RECORD (
|
||||||
source_schema VARCHAR2(50),
|
source_schema VARCHAR2(50),
|
||||||
source_table VARCHAR2(100),
|
source_table VARCHAR2(100),
|
||||||
data_external_table VARCHAR2(100),
|
hist_external_table VARCHAR2(100)
|
||||||
hist_external_table VARCHAR2(100),
|
|
||||||
has_data_export BOOLEAN,
|
|
||||||
has_hist_export BOOLEAN
|
|
||||||
);
|
);
|
||||||
TYPE t_table_list IS TABLE OF t_table_info;
|
TYPE t_table_list IS TABLE OF t_table_info;
|
||||||
|
|
||||||
vTables t_table_list;
|
vTables t_table_list;
|
||||||
vSourceCount NUMBER;
|
vSourceCount NUMBER;
|
||||||
vDataCount NUMBER;
|
|
||||||
vHistCount NUMBER;
|
vHistCount NUMBER;
|
||||||
vTotalSourceCount NUMBER := 0;
|
vTotalSourceCount NUMBER := 0;
|
||||||
vTotalDataCount NUMBER := 0;
|
|
||||||
vTotalHistCount NUMBER := 0;
|
vTotalHistCount NUMBER := 0;
|
||||||
vMismatchCount NUMBER := 0;
|
vMismatchCount NUMBER := 0;
|
||||||
vSql VARCHAR2(4000);
|
vSql VARCHAR2(4000);
|
||||||
@@ -42,18 +38,18 @@ BEGIN
|
|||||||
DBMS_OUTPUT.PUT_LINE('VERIFICATION TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS'));
|
DBMS_OUTPUT.PUT_LINE('VERIFICATION TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS'));
|
||||||
DBMS_OUTPUT.PUT_LINE('');
|
DBMS_OUTPUT.PUT_LINE('');
|
||||||
|
|
||||||
-- Initialize table list with export configuration
|
-- Initialize table list (all tables HIST-only)
|
||||||
vTables := t_table_list(
|
vTables := t_table_list(
|
||||||
t_table_info('OU_CSDB', 'LEGACY_DEBT', 'ODS.CSDB_DEBT_ODS', 'ODS.CSDB_DEBT_ARCHIVE', TRUE, TRUE),
|
t_table_info('OU_CSDB', 'LEGACY_DEBT', 'ODS.CSDB_DEBT_ARCHIVE'),
|
||||||
t_table_info('OU_CSDB', 'LEGACY_DEBT_DAILY', 'ODS.CSDB_DEBT_DAILY_ODS', 'ODS.CSDB_DEBT_DAILY_ARCHIVE', TRUE, TRUE),
|
t_table_info('OU_CSDB', 'LEGACY_DEBT_DAILY', 'ODS.CSDB_DEBT_DAILY_ARCHIVE'),
|
||||||
t_table_info('OU_CSDB', 'LEGACY_INSTR_RAT_FULL', NULL, 'ODS.CSDB_INSTR_RAT_FULL_ARCHIVE', FALSE, TRUE),
|
t_table_info('OU_CSDB', 'LEGACY_INSTR_RAT_FULL', 'ODS.CSDB_INSTR_RAT_FULL_ARCHIVE'),
|
||||||
t_table_info('OU_CSDB', 'LEGACY_INSTR_DESC_FULL', NULL, 'ODS.CSDB_INSTR_DESC_FULL_ARCHIVE', FALSE, TRUE),
|
t_table_info('OU_CSDB', 'LEGACY_INSTR_DESC_FULL', 'ODS.CSDB_INSTR_DESC_FULL_ARCHIVE'),
|
||||||
t_table_info('OU_CSDB', 'LEGACY_ISSUER_RAT_FULL', NULL, 'ODS.CSDB_ISSUER_RAT_FULL_ARCHIVE', FALSE, TRUE),
|
t_table_info('OU_CSDB', 'LEGACY_ISSUER_RAT_FULL', 'ODS.CSDB_ISSUER_RAT_FULL_ARCHIVE'),
|
||||||
t_table_info('OU_CSDB', 'LEGACY_ISSUER_DESC_FULL', NULL, 'ODS.CSDB_ISSUER_DESC_FULL_ARCHIVE', FALSE, TRUE)
|
t_table_info('OU_CSDB', 'LEGACY_ISSUER_DESC_FULL', 'ODS.CSDB_ISSUER_DESC_FULL_ARCHIVE')
|
||||||
);
|
);
|
||||||
|
|
||||||
DBMS_OUTPUT.PUT_LINE('-----------------------------------------------------------------------------------------');
|
DBMS_OUTPUT.PUT_LINE('-----------------------------------------------------------------------------------------');
|
||||||
DBMS_OUTPUT.PUT_LINE('Table Name Source Count DATA Count HIST Count Status');
|
DBMS_OUTPUT.PUT_LINE('Table Name Source Count HIST Count Status');
|
||||||
DBMS_OUTPUT.PUT_LINE('-----------------------------------------------------------------------------------------');
|
DBMS_OUTPUT.PUT_LINE('-----------------------------------------------------------------------------------------');
|
||||||
|
|
||||||
FOR i IN 1..vTables.COUNT LOOP
|
FOR i IN 1..vTables.COUNT LOOP
|
||||||
@@ -70,31 +66,6 @@ BEGIN
|
|||||||
CONTINUE;
|
CONTINUE;
|
||||||
END;
|
END;
|
||||||
|
|
||||||
-- Get DATA external table count (if applicable)
|
|
||||||
IF vTables(i).has_data_export THEN
|
|
||||||
vSql := 'SELECT COUNT(*) FROM ' || vTables(i).data_external_table;
|
|
||||||
BEGIN
|
|
||||||
EXECUTE IMMEDIATE vSql INTO vDataCount;
|
|
||||||
vTotalDataCount := vTotalDataCount + vDataCount;
|
|
||||||
EXCEPTION
|
|
||||||
WHEN OTHERS THEN
|
|
||||||
-- If source table is empty (0 records), no files were exported
|
|
||||||
-- External table returns error, treat as 0
|
|
||||||
-- Acceptable error codes:
|
|
||||||
-- ORA-29913: error in executing ODCIEXTTABLEOPEN callout
|
|
||||||
-- ORA-29400: data cartridge error
|
|
||||||
-- KUP-13023: nothing matched wildcard query (no files in bucket)
|
|
||||||
-- NOTE: ORA-30653 (reject limit) is a real data quality error, not treated as empty
|
|
||||||
IF vSourceCount = 0 OR SQLCODE IN (-29913, -29400) OR SQLERRM LIKE '%KUP-13023%' THEN
|
|
||||||
vDataCount := 0;
|
|
||||||
ELSE
|
|
||||||
vDataCount := -1;
|
|
||||||
END IF;
|
|
||||||
END;
|
|
||||||
ELSE
|
|
||||||
vDataCount := NULL;
|
|
||||||
END IF;
|
|
||||||
|
|
||||||
-- Get HIST external table count
|
-- Get HIST external table count
|
||||||
vSql := 'SELECT COUNT(*) FROM ' || vTables(i).hist_external_table;
|
vSql := 'SELECT COUNT(*) FROM ' || vTables(i).hist_external_table;
|
||||||
BEGIN
|
BEGIN
|
||||||
@@ -119,18 +90,8 @@ BEGIN
|
|||||||
-- Display results
|
-- Display results
|
||||||
DECLARE
|
DECLARE
|
||||||
vStatus VARCHAR2(20);
|
vStatus VARCHAR2(20);
|
||||||
vDataDisplay VARCHAR2(17);
|
|
||||||
vHistDisplay VARCHAR2(17);
|
vHistDisplay VARCHAR2(17);
|
||||||
BEGIN
|
BEGIN
|
||||||
-- Format DATA count display
|
|
||||||
IF vDataCount IS NULL THEN
|
|
||||||
vDataDisplay := 'N/A';
|
|
||||||
ELSIF vDataCount = -1 THEN
|
|
||||||
vDataDisplay := 'ERROR';
|
|
||||||
ELSE
|
|
||||||
vDataDisplay := TO_CHAR(vDataCount, '9,999,999,999');
|
|
||||||
END IF;
|
|
||||||
|
|
||||||
-- Format HIST count display
|
-- Format HIST count display
|
||||||
IF vHistCount = -1 THEN
|
IF vHistCount = -1 THEN
|
||||||
vHistDisplay := 'ERROR';
|
vHistDisplay := 'ERROR';
|
||||||
@@ -138,20 +99,7 @@ BEGIN
|
|||||||
vHistDisplay := TO_CHAR(vHistCount, '9,999,999,999');
|
vHistDisplay := TO_CHAR(vHistCount, '9,999,999,999');
|
||||||
END IF;
|
END IF;
|
||||||
|
|
||||||
-- Determine status
|
-- Determine status (HIST only: check HIST = SOURCE)
|
||||||
IF vTables(i).has_data_export THEN
|
|
||||||
-- Split export: check DATA + HIST = SOURCE
|
|
||||||
IF (vDataCount + vHistCount) = vSourceCount THEN
|
|
||||||
vStatus := 'PASS';
|
|
||||||
ELSIF vDataCount = -1 OR vHistCount = -1 THEN
|
|
||||||
vStatus := 'ERROR';
|
|
||||||
vMismatchCount := vMismatchCount + 1;
|
|
||||||
ELSE
|
|
||||||
vStatus := 'MISMATCH';
|
|
||||||
vMismatchCount := vMismatchCount + 1;
|
|
||||||
END IF;
|
|
||||||
ELSE
|
|
||||||
-- HIST only: check HIST = SOURCE
|
|
||||||
IF vHistCount = vSourceCount THEN
|
IF vHistCount = vSourceCount THEN
|
||||||
vStatus := 'PASS';
|
vStatus := 'PASS';
|
||||||
ELSIF vHistCount = -1 THEN
|
ELSIF vHistCount = -1 THEN
|
||||||
@@ -161,12 +109,10 @@ BEGIN
|
|||||||
vStatus := 'MISMATCH';
|
vStatus := 'MISMATCH';
|
||||||
vMismatchCount := vMismatchCount + 1;
|
vMismatchCount := vMismatchCount + 1;
|
||||||
END IF;
|
END IF;
|
||||||
END IF;
|
|
||||||
|
|
||||||
DBMS_OUTPUT.PUT_LINE(
|
DBMS_OUTPUT.PUT_LINE(
|
||||||
RPAD(vTables(i).source_table, 24) ||
|
RPAD(vTables(i).source_table, 24) ||
|
||||||
LPAD(TO_CHAR(vSourceCount, '9,999,999,999'), 15) ||
|
LPAD(TO_CHAR(vSourceCount, '9,999,999,999'), 15) ||
|
||||||
LPAD(vDataDisplay, 15) ||
|
|
||||||
LPAD(vHistDisplay, 15) || ' ' ||
|
LPAD(vHistDisplay, 15) || ' ' ||
|
||||||
vStatus
|
vStatus
|
||||||
);
|
);
|
||||||
@@ -177,18 +123,16 @@ BEGIN
|
|||||||
DBMS_OUTPUT.PUT_LINE(
|
DBMS_OUTPUT.PUT_LINE(
|
||||||
RPAD('TOTALS', 24) ||
|
RPAD('TOTALS', 24) ||
|
||||||
LPAD(TO_CHAR(vTotalSourceCount, '9,999,999,999'), 15) ||
|
LPAD(TO_CHAR(vTotalSourceCount, '9,999,999,999'), 15) ||
|
||||||
LPAD(TO_CHAR(vTotalDataCount, '9,999,999,999'), 15) ||
|
|
||||||
LPAD(TO_CHAR(vTotalHistCount, '9,999,999,999'), 15)
|
LPAD(TO_CHAR(vTotalHistCount, '9,999,999,999'), 15)
|
||||||
);
|
);
|
||||||
DBMS_OUTPUT.PUT_LINE('-----------------------------------------------------------------------------------------');
|
DBMS_OUTPUT.PUT_LINE('-----------------------------------------------------------------------------------------');
|
||||||
DBMS_OUTPUT.PUT_LINE('');
|
DBMS_OUTPUT.PUT_LINE('');
|
||||||
|
|
||||||
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
|
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
|
||||||
DBMS_OUTPUT.PUT_LINE('Record Count Verification Summary');
|
DBMS_OUTPUT.PUT_LINE('Record Count Verification Summary (HIST-only strategy)');
|
||||||
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
|
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
|
||||||
DBMS_OUTPUT.PUT_LINE('Total source records: ' || TO_CHAR(vTotalSourceCount, '9,999,999,999'));
|
DBMS_OUTPUT.PUT_LINE('Total source records: ' || TO_CHAR(vTotalSourceCount, '9,999,999,999'));
|
||||||
DBMS_OUTPUT.PUT_LINE('Total DATA records: ' || TO_CHAR(vTotalDataCount, '9,999,999,999') || ' (last 6 months)');
|
DBMS_OUTPUT.PUT_LINE('Total HIST records: ' || TO_CHAR(vTotalHistCount, '9,999,999,999') || ' (all data in HIST)');
|
||||||
DBMS_OUTPUT.PUT_LINE('Total HIST records: ' || TO_CHAR(vTotalHistCount, '9,999,999,999') || ' (historical + full exports)');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('');
|
DBMS_OUTPUT.PUT_LINE('');
|
||||||
|
|
||||||
IF vMismatchCount = 0 THEN
|
IF vMismatchCount = 0 THEN
|
||||||
@@ -209,7 +153,6 @@ BEGIN
|
|||||||
DBMS_OUTPUT.PUT_LINE(' MISMATCH - Record counts differ (may be pre-existing files or export issue)');
|
DBMS_OUTPUT.PUT_LINE(' MISMATCH - Record counts differ (may be pre-existing files or export issue)');
|
||||||
DBMS_OUTPUT.PUT_LINE(' Check pre-check results to identify pre-existing files');
|
DBMS_OUTPUT.PUT_LINE(' Check pre-check results to identify pre-existing files');
|
||||||
DBMS_OUTPUT.PUT_LINE(' ERROR - Cannot access table (may not exist yet)');
|
DBMS_OUTPUT.PUT_LINE(' ERROR - Cannot access table (may not exist yet)');
|
||||||
DBMS_OUTPUT.PUT_LINE(' N/A - Not applicable (table not exported to DATA)');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
|
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
|
||||||
|
|
||||||
EXCEPTION
|
EXCEPTION
|
||||||
|
|||||||
@@ -0,0 +1,54 @@
|
|||||||
|
--=============================================================================================================================
|
||||||
|
-- MARS-835 ROLLBACK: Delete File Registration Records
|
||||||
|
--=============================================================================================================================
|
||||||
|
-- Purpose: Delete all file registration records from A_SOURCE_FILE_RECEIVED table for MARS-835 process
|
||||||
|
-- Author: Grzegorz Michalski
|
||||||
|
-- Date: 2026-02-13
|
||||||
|
-- Related: MARS-835 - CSDB Data Export Rollback
|
||||||
|
--=============================================================================================================================
|
||||||
|
|
||||||
|
SET SERVEROUTPUT ON SIZE UNLIMITED
|
||||||
|
SET TIMING ON
|
||||||
|
|
||||||
|
PROMPT ========================================================================
|
||||||
|
PROMPT ROLLBACK: Deleting file registration records from A_SOURCE_FILE_RECEIVED
|
||||||
|
PROMPT ========================================================================
|
||||||
|
|
||||||
|
DECLARE
|
||||||
|
vRowCount NUMBER := 0;
|
||||||
|
vStartTime TIMESTAMP := SYSTIMESTAMP;
|
||||||
|
vEndTime TIMESTAMP;
|
||||||
|
vElapsedSeconds NUMBER;
|
||||||
|
BEGIN
|
||||||
|
DBMS_OUTPUT.PUT_LINE('Deleting all MARS-835 file registrations from A_SOURCE_FILE_RECEIVED...');
|
||||||
|
|
||||||
|
-- Delete all records for MARS-835 process
|
||||||
|
DELETE FROM CT_MRDS.A_SOURCE_FILE_RECEIVED
|
||||||
|
WHERE PROCESS_NAME = 'MARS-835';
|
||||||
|
|
||||||
|
vRowCount := SQL%ROWCOUNT;
|
||||||
|
COMMIT;
|
||||||
|
|
||||||
|
vEndTime := SYSTIMESTAMP;
|
||||||
|
vElapsedSeconds := EXTRACT(SECOND FROM (vEndTime - vStartTime)) +
|
||||||
|
EXTRACT(MINUTE FROM (vEndTime - vStartTime)) * 60 +
|
||||||
|
EXTRACT(HOUR FROM (vEndTime - vStartTime)) * 3600;
|
||||||
|
|
||||||
|
DBMS_OUTPUT.PUT_LINE('========================================================================');
|
||||||
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: File registration records deleted');
|
||||||
|
DBMS_OUTPUT.PUT_LINE('========================================================================');
|
||||||
|
DBMS_OUTPUT.PUT_LINE('Records deleted: ' || vRowCount);
|
||||||
|
DBMS_OUTPUT.PUT_LINE('Elapsed time: ' || ROUND(vElapsedSeconds, 2) || ' seconds');
|
||||||
|
DBMS_OUTPUT.PUT_LINE('========================================================================');
|
||||||
|
EXCEPTION
|
||||||
|
WHEN OTHERS THEN
|
||||||
|
ROLLBACK;
|
||||||
|
DBMS_OUTPUT.PUT_LINE('ERROR: Failed to delete file registration records');
|
||||||
|
DBMS_OUTPUT.PUT_LINE('Error message: ' || SQLERRM);
|
||||||
|
RAISE;
|
||||||
|
END;
|
||||||
|
/
|
||||||
|
|
||||||
|
--=============================================================================================================================
|
||||||
|
-- End of Script
|
||||||
|
--=============================================================================================================================
|
||||||
@@ -1,76 +1,43 @@
|
|||||||
--=============================================================================================================================
|
--=============================================================================================================================
|
||||||
-- MARS-835 ROLLBACK: Delete Group 1 Exported Files (DEBT, DEBT_DAILY)
|
-- MARS-835 ROLLBACK: Delete Group 1 Exported Files (DEBT, DEBT_DAILY)
|
||||||
--=============================================================================================================================
|
--=============================================================================================================================
|
||||||
-- Purpose: Delete exported CSV and Parquet files from DATA and HIST buckets
|
-- Purpose: Delete exported Parquet files from HIST bucket (ARCHIVE only)
|
||||||
-- WARNING: This will permanently delete exported data files!
|
-- WARNING: This will permanently delete exported data files!
|
||||||
-- Author: Grzegorz Michalski
|
-- Author: Grzegorz Michalski
|
||||||
-- Date: 2025-12-17
|
-- Date: 2025-12-17
|
||||||
|
-- Updated: 2026-02-24 (Changed to HIST-only rollback, no DATA bucket)
|
||||||
-- Related: MARS-835 - CSDB Data Export Rollback
|
-- Related: MARS-835 - CSDB Data Export Rollback
|
||||||
--=============================================================================================================================
|
--=============================================================================================================================
|
||||||
|
|
||||||
SET SERVEROUTPUT ON SIZE UNLIMITED
|
SET SERVEROUTPUT ON SIZE UNLIMITED
|
||||||
|
|
||||||
PROMPT ========================================================================
|
PROMPT ========================================================================
|
||||||
PROMPT ROLLBACK: Deleting DEBT exported files
|
PROMPT ROLLBACK: Deleting DEBT exported files from HIST
|
||||||
PROMPT ========================================================================
|
PROMPT ========================================================================
|
||||||
PROMPT WARNING: This will delete files from:
|
PROMPT WARNING: This will delete files from:
|
||||||
PROMPT - DATA bucket: mrds_data_dev/ODS/CSDB/CSDB_DEBT/
|
|
||||||
PROMPT - HIST bucket: mrds_hist_dev/ARCHIVE/CSDB/CSDB_DEBT/
|
PROMPT - HIST bucket: mrds_hist_dev/ARCHIVE/CSDB/CSDB_DEBT/
|
||||||
PROMPT ========================================================================
|
PROMPT ========================================================================
|
||||||
|
|
||||||
DECLARE
|
DECLARE
|
||||||
vDataBucketUri VARCHAR2(500);
|
|
||||||
vHistBucketUri VARCHAR2(500);
|
vHistBucketUri VARCHAR2(500);
|
||||||
vCredentialName VARCHAR2(100);
|
vCredentialName VARCHAR2(100);
|
||||||
|
vFileCount NUMBER := 0;
|
||||||
BEGIN
|
BEGIN
|
||||||
-- Get bucket URIs and credential
|
-- Get bucket URI and credential
|
||||||
vDataBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('DATA');
|
|
||||||
vHistBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
|
vHistBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
|
||||||
vCredentialName := CT_MRDS.ENV_MANAGER.gvCredentialName;
|
vCredentialName := CT_MRDS.ENV_MANAGER.gvCredentialName;
|
||||||
|
|
||||||
DBMS_OUTPUT.PUT_LINE('Deleting DEBT files from DATA bucket...');
|
DBMS_OUTPUT.PUT_LINE('Deleting DEBT Parquet files from ARCHIVE bucket...');
|
||||||
|
DBMS_OUTPUT.PUT_LINE(' Using DBMS_CLOUD.LIST_OBJECTS');
|
||||||
|
|
||||||
-- Delete CSV files from DATA bucket (only files matching export pattern)
|
-- Delete Parquet files from ARCHIVE bucket using DBMS_CLOUD.LIST_OBJECTS
|
||||||
-- Pattern matches: LEGACY_DEBT_YYYYMM.csv OR LEGACY_DEBT_YYYYMM_1_20260122T...Z.csv (Oracle timestamp)
|
|
||||||
FOR rec IN (
|
|
||||||
SELECT object_name
|
|
||||||
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
|
||||||
credential_name => vCredentialName,
|
|
||||||
location_uri => vDataBucketUri || 'ODS/CSDB/CSDB_DEBT/'
|
|
||||||
))
|
|
||||||
WHERE object_name LIKE 'LEGACY_DEBT_%'
|
|
||||||
AND object_name LIKE '%.csv'
|
|
||||||
AND REGEXP_LIKE(object_name, '^LEGACY_DEBT_[0-9]{6}(_[0-9]+_[0-9]{8}T[0-9]{6,}Z)?\.csv$') -- YYYYMM or YYYYMM_1_timestamp
|
|
||||||
) LOOP
|
|
||||||
BEGIN
|
|
||||||
DBMS_CLOUD.DELETE_OBJECT(
|
|
||||||
credential_name => vCredentialName,
|
|
||||||
object_uri => vDataBucketUri || 'ODS/CSDB/CSDB_DEBT/' || rec.object_name
|
|
||||||
);
|
|
||||||
DBMS_OUTPUT.PUT_LINE(' Deleted: ' || rec.object_name);
|
|
||||||
EXCEPTION
|
|
||||||
WHEN OTHERS THEN
|
|
||||||
IF SQLCODE = -20404 THEN
|
|
||||||
DBMS_OUTPUT.PUT_LINE(' Skipped (not found): ' || rec.object_name);
|
|
||||||
ELSE
|
|
||||||
RAISE;
|
|
||||||
END IF;
|
|
||||||
END;
|
|
||||||
END LOOP;
|
|
||||||
|
|
||||||
DBMS_OUTPUT.PUT_LINE('Deleting DEBT files from HIST bucket...');
|
|
||||||
|
|
||||||
-- Delete Parquet files from HIST bucket (only files matching export pattern)
|
|
||||||
-- Pattern matches: YYYYMM.parquet OR YYYYMM_1_20260122T...Z.parquet (Oracle timestamp)
|
|
||||||
FOR rec IN (
|
FOR rec IN (
|
||||||
SELECT object_name
|
SELECT object_name
|
||||||
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
||||||
credential_name => vCredentialName,
|
credential_name => vCredentialName,
|
||||||
location_uri => vHistBucketUri || 'ARCHIVE/CSDB/CSDB_DEBT/'
|
location_uri => vHistBucketUri || 'ARCHIVE/CSDB/CSDB_DEBT/'
|
||||||
))
|
))
|
||||||
WHERE object_name LIKE '%PARTITION_YEAR=%' -- Hive-style partitioning folders
|
WHERE object_name NOT LIKE '%/' -- Exclude directories
|
||||||
AND object_name LIKE '%.parquet'
|
|
||||||
AND REGEXP_LIKE(object_name, '[0-9]{6}(_[0-9]+_[0-9]{8}T[0-9]{6,}Z)?\.parquet$') -- YYYYMM or YYYYMM_1_timestamp
|
|
||||||
) LOOP
|
) LOOP
|
||||||
BEGIN
|
BEGIN
|
||||||
DBMS_CLOUD.DELETE_OBJECT(
|
DBMS_CLOUD.DELETE_OBJECT(
|
||||||
@@ -78,6 +45,7 @@ BEGIN
|
|||||||
object_uri => vHistBucketUri || 'ARCHIVE/CSDB/CSDB_DEBT/' || rec.object_name
|
object_uri => vHistBucketUri || 'ARCHIVE/CSDB/CSDB_DEBT/' || rec.object_name
|
||||||
);
|
);
|
||||||
DBMS_OUTPUT.PUT_LINE(' Deleted: ' || rec.object_name);
|
DBMS_OUTPUT.PUT_LINE(' Deleted: ' || rec.object_name);
|
||||||
|
vFileCount := vFileCount + 1;
|
||||||
EXCEPTION
|
EXCEPTION
|
||||||
WHEN OTHERS THEN
|
WHEN OTHERS THEN
|
||||||
IF SQLCODE = -20404 THEN
|
IF SQLCODE = -20404 THEN
|
||||||
@@ -88,71 +56,41 @@ BEGIN
|
|||||||
END;
|
END;
|
||||||
END LOOP;
|
END LOOP;
|
||||||
|
|
||||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: DEBT files deleted');
|
IF vFileCount = 0 THEN
|
||||||
|
DBMS_OUTPUT.PUT_LINE(' INFO: No DEBT Parquet files found to delete');
|
||||||
|
END IF;
|
||||||
|
|
||||||
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: DEBT Parquet files deleted from ARCHIVE bucket (' || vFileCount || ' file(s))');
|
||||||
END;
|
END;
|
||||||
/
|
/
|
||||||
|
|
||||||
PROMPT ========================================================================
|
PROMPT ========================================================================
|
||||||
PROMPT ROLLBACK: Deleting DEBT_DAILY exported files
|
PROMPT ROLLBACK: Deleting DEBT_DAILY exported files from HIST
|
||||||
PROMPT ========================================================================
|
PROMPT ========================================================================
|
||||||
PROMPT WARNING: This will delete files from:
|
PROMPT WARNING: This will delete files from:
|
||||||
PROMPT - DATA bucket: mrds_data_dev/ODS/CSDB/CSDB_DEBT_DAILY/
|
|
||||||
PROMPT - HIST bucket: mrds_hist_dev/ARCHIVE/CSDB/CSDB_DEBT_DAILY/
|
PROMPT - HIST bucket: mrds_hist_dev/ARCHIVE/CSDB/CSDB_DEBT_DAILY/
|
||||||
PROMPT ========================================================================
|
PROMPT ========================================================================
|
||||||
|
|
||||||
DECLARE
|
DECLARE
|
||||||
vDataBucketUri VARCHAR2(500);
|
|
||||||
vHistBucketUri VARCHAR2(500);
|
vHistBucketUri VARCHAR2(500);
|
||||||
vCredentialName VARCHAR2(100);
|
vCredentialName VARCHAR2(100);
|
||||||
|
vFileCount NUMBER := 0;
|
||||||
BEGIN
|
BEGIN
|
||||||
-- Get bucket URIs and credential
|
-- Get bucket URI and credential
|
||||||
vDataBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('DATA');
|
|
||||||
vHistBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
|
vHistBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
|
||||||
vCredentialName := CT_MRDS.ENV_MANAGER.gvCredentialName;
|
vCredentialName := CT_MRDS.ENV_MANAGER.gvCredentialName;
|
||||||
|
|
||||||
DBMS_OUTPUT.PUT_LINE('Deleting DEBT_DAILY files from DATA bucket...');
|
DBMS_OUTPUT.PUT_LINE('Deleting DEBT_DAILY Parquet files from ARCHIVE bucket...');
|
||||||
|
DBMS_OUTPUT.PUT_LINE(' Using DBMS_CLOUD.LIST_OBJECTS');
|
||||||
|
|
||||||
-- Delete CSV files from DATA bucket (only files matching export pattern)
|
-- Delete Parquet files from ARCHIVE bucket using DBMS_CLOUD.LIST_OBJECTS
|
||||||
-- Pattern matches: LEGACY_DEBT_DAILY_YYYYMM.csv OR LEGACY_DEBT_DAILY_YYYYMM_1_timestamp.csv
|
|
||||||
FOR rec IN (
|
|
||||||
SELECT object_name
|
|
||||||
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
|
||||||
credential_name => vCredentialName,
|
|
||||||
location_uri => vDataBucketUri || 'ODS/CSDB/CSDB_DEBT_DAILY/'
|
|
||||||
))
|
|
||||||
WHERE object_name LIKE 'LEGACY_DEBT_DAILY_%'
|
|
||||||
AND object_name LIKE '%.csv'
|
|
||||||
AND REGEXP_LIKE(object_name, '^LEGACY_DEBT_DAILY_[0-9]{6}(_[0-9]+_[0-9]{8}T[0-9]{6,}Z)?\.csv$') -- YYYYMM or YYYYMM_1_timestamp
|
|
||||||
) LOOP
|
|
||||||
BEGIN
|
|
||||||
DBMS_CLOUD.DELETE_OBJECT(
|
|
||||||
credential_name => vCredentialName,
|
|
||||||
object_uri => vDataBucketUri || 'ODS/CSDB/CSDB_DEBT_DAILY/' || rec.object_name
|
|
||||||
);
|
|
||||||
DBMS_OUTPUT.PUT_LINE(' Deleted: ' || rec.object_name);
|
|
||||||
EXCEPTION
|
|
||||||
WHEN OTHERS THEN
|
|
||||||
IF SQLCODE = -20404 THEN
|
|
||||||
DBMS_OUTPUT.PUT_LINE(' Skipped (not found): ' || rec.object_name);
|
|
||||||
ELSE
|
|
||||||
RAISE;
|
|
||||||
END IF;
|
|
||||||
END;
|
|
||||||
END LOOP;
|
|
||||||
|
|
||||||
DBMS_OUTPUT.PUT_LINE('Deleting DEBT_DAILY files from HIST bucket...');
|
|
||||||
|
|
||||||
-- Delete Parquet files from HIST bucket (only files matching export pattern)
|
|
||||||
-- Pattern matches: YYYYMM.parquet OR YYYYMM_1_timestamp.parquet
|
|
||||||
FOR rec IN (
|
FOR rec IN (
|
||||||
SELECT object_name
|
SELECT object_name
|
||||||
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
||||||
credential_name => vCredentialName,
|
credential_name => vCredentialName,
|
||||||
location_uri => vHistBucketUri || 'ARCHIVE/CSDB/CSDB_DEBT_DAILY/'
|
location_uri => vHistBucketUri || 'ARCHIVE/CSDB/CSDB_DEBT_DAILY/'
|
||||||
))
|
))
|
||||||
WHERE object_name LIKE '%PARTITION_YEAR=%' -- Hive-style partitioning folders
|
WHERE object_name NOT LIKE '%/' -- Exclude directories
|
||||||
AND object_name LIKE '%.parquet'
|
|
||||||
AND REGEXP_LIKE(object_name, '[0-9]{6}(_[0-9]+_[0-9]{8}T[0-9]{6,}Z)?\.parquet$') -- YYYYMM or YYYYMM_1_timestamp
|
|
||||||
) LOOP
|
) LOOP
|
||||||
BEGIN
|
BEGIN
|
||||||
DBMS_CLOUD.DELETE_OBJECT(
|
DBMS_CLOUD.DELETE_OBJECT(
|
||||||
@@ -160,6 +98,7 @@ BEGIN
|
|||||||
object_uri => vHistBucketUri || 'ARCHIVE/CSDB/CSDB_DEBT_DAILY/' || rec.object_name
|
object_uri => vHistBucketUri || 'ARCHIVE/CSDB/CSDB_DEBT_DAILY/' || rec.object_name
|
||||||
);
|
);
|
||||||
DBMS_OUTPUT.PUT_LINE(' Deleted: ' || rec.object_name);
|
DBMS_OUTPUT.PUT_LINE(' Deleted: ' || rec.object_name);
|
||||||
|
vFileCount := vFileCount + 1;
|
||||||
EXCEPTION
|
EXCEPTION
|
||||||
WHEN OTHERS THEN
|
WHEN OTHERS THEN
|
||||||
IF SQLCODE = -20404 THEN
|
IF SQLCODE = -20404 THEN
|
||||||
@@ -170,7 +109,11 @@ BEGIN
|
|||||||
END;
|
END;
|
||||||
END LOOP;
|
END LOOP;
|
||||||
|
|
||||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: DEBT_DAILY files deleted');
|
IF vFileCount = 0 THEN
|
||||||
|
DBMS_OUTPUT.PUT_LINE(' INFO: No DEBT_DAILY Parquet files found to delete');
|
||||||
|
END IF;
|
||||||
|
|
||||||
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: DEBT_DAILY Parquet files deleted from ARCHIVE bucket (' || vFileCount || ' file(s))');
|
||||||
END;
|
END;
|
||||||
/
|
/
|
||||||
|
|
||||||
|
|||||||
@@ -1,10 +1,11 @@
|
|||||||
-- =====================================================================================
|
-- =====================================================================================
|
||||||
-- Script: 99_MARS_835_verify_rollback.sql
|
-- Script: 99_MARS_835_verify_rollback.sql
|
||||||
-- Purpose: Verify all exported files have been deleted from DATA and HIST buckets
|
-- Purpose: Verify all exported files have been deleted from HIST bucket (HIST-only strategy)
|
||||||
-- Author: Grzegorz Michalski
|
-- Author: Grzegorz Michalski
|
||||||
-- Created: 2025-12-17
|
-- Created: 2025-12-17
|
||||||
|
-- Updated: 2026-02-24 (Changed to HIST-only verification)
|
||||||
-- MARS Issue: MARS-835
|
-- MARS Issue: MARS-835
|
||||||
-- Verification: Confirm complete rollback (no CSDB files remaining)
|
-- Verification: Confirm complete rollback (no CSDB files remaining in HIST)
|
||||||
-- =====================================================================================
|
-- =====================================================================================
|
||||||
|
|
||||||
SET SERVEROUTPUT ON SIZE UNLIMITED;
|
SET SERVEROUTPUT ON SIZE UNLIMITED;
|
||||||
@@ -19,33 +20,23 @@ PROMPT Checking that all CSDB export files have been deleted
|
|||||||
PROMPT =====================================================================================
|
PROMPT =====================================================================================
|
||||||
|
|
||||||
DECLARE
|
DECLARE
|
||||||
vDataBucketUri VARCHAR2(500);
|
|
||||||
vHistBucketUri VARCHAR2(500);
|
vHistBucketUri VARCHAR2(500);
|
||||||
vCredentialName VARCHAR2(100);
|
vCredentialName VARCHAR2(100);
|
||||||
vDataFileCount NUMBER := 0;
|
|
||||||
vHistFileCount NUMBER := 0;
|
vHistFileCount NUMBER := 0;
|
||||||
vTotalFiles NUMBER := 0;
|
|
||||||
|
|
||||||
TYPE t_folder_list IS TABLE OF VARCHAR2(200);
|
TYPE t_folder_list IS TABLE OF VARCHAR2(200);
|
||||||
vDataFolders t_folder_list;
|
|
||||||
vHistFolders t_folder_list;
|
vHistFolders t_folder_list;
|
||||||
BEGIN
|
BEGIN
|
||||||
-- Get bucket URIs
|
-- Get bucket URI
|
||||||
vDataBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('DATA');
|
|
||||||
vHistBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
|
vHistBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
|
||||||
vCredentialName := CT_MRDS.ENV_MANAGER.gvCredentialName;
|
vCredentialName := CT_MRDS.ENV_MANAGER.gvCredentialName;
|
||||||
|
|
||||||
DBMS_OUTPUT.PUT_LINE('ROLLBACK VERIFICATION TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
|
DBMS_OUTPUT.PUT_LINE('ROLLBACK VERIFICATION TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
|
||||||
DBMS_OUTPUT.PUT_LINE('DATA Bucket URI: ' || vDataBucketUri);
|
|
||||||
DBMS_OUTPUT.PUT_LINE('HIST Bucket URI: ' || vHistBucketUri);
|
DBMS_OUTPUT.PUT_LINE('HIST Bucket URI: ' || vHistBucketUri);
|
||||||
DBMS_OUTPUT.PUT_LINE('');
|
DBMS_OUTPUT.PUT_LINE('');
|
||||||
|
|
||||||
-- Initialize folder lists
|
-- Initialize folder list (all 6 tables in HIST)
|
||||||
vDataFolders := t_folder_list(
|
-- Initialize folder list (all 6 tables in HIST)
|
||||||
'ODS/CSDB/CSDB_DEBT/',
|
|
||||||
'ODS/CSDB/CSDB_DEBT_DAILY/'
|
|
||||||
);
|
|
||||||
|
|
||||||
vHistFolders := t_folder_list(
|
vHistFolders := t_folder_list(
|
||||||
'ARCHIVE/CSDB/CSDB_DEBT/',
|
'ARCHIVE/CSDB/CSDB_DEBT/',
|
||||||
'ARCHIVE/CSDB/CSDB_DEBT_DAILY/',
|
'ARCHIVE/CSDB/CSDB_DEBT_DAILY/',
|
||||||
@@ -55,47 +46,6 @@ BEGIN
|
|||||||
'ARCHIVE/CSDB/CSDB_ISSUER_DESC_FULL/'
|
'ARCHIVE/CSDB/CSDB_ISSUER_DESC_FULL/'
|
||||||
);
|
);
|
||||||
|
|
||||||
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('Checking DATA Bucket (should be empty)');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
|
|
||||||
|
|
||||||
-- Check DATA bucket
|
|
||||||
FOR i IN 1..vDataFolders.COUNT LOOP
|
|
||||||
DECLARE
|
|
||||||
vCount NUMBER := 0;
|
|
||||||
BEGIN
|
|
||||||
DBMS_OUTPUT.PUT_LINE('');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('Folder: ' || vDataFolders(i));
|
|
||||||
|
|
||||||
FOR rec IN (
|
|
||||||
SELECT object_name
|
|
||||||
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
|
||||||
credential_name => vCredentialName,
|
|
||||||
location_uri => vDataBucketUri || vDataFolders(i)
|
|
||||||
))
|
|
||||||
WHERE object_name LIKE '%.csv'
|
|
||||||
) LOOP
|
|
||||||
vCount := vCount + 1;
|
|
||||||
vDataFileCount := vDataFileCount + 1;
|
|
||||||
DBMS_OUTPUT.PUT_LINE(' [FOUND] ' || rec.object_name);
|
|
||||||
END LOOP;
|
|
||||||
|
|
||||||
IF vCount = 0 THEN
|
|
||||||
DBMS_OUTPUT.PUT_LINE(' [OK] No CSV files found');
|
|
||||||
ELSE
|
|
||||||
DBMS_OUTPUT.PUT_LINE(' [INFO] Found ' || vCount || ' file(s) - may be pre-existing files from before installation');
|
|
||||||
END IF;
|
|
||||||
EXCEPTION
|
|
||||||
WHEN OTHERS THEN
|
|
||||||
IF SQLCODE = -20404 THEN
|
|
||||||
DBMS_OUTPUT.PUT_LINE(' [OK] Folder does not exist or is empty');
|
|
||||||
ELSE
|
|
||||||
DBMS_OUTPUT.PUT_LINE(' [ERROR] ' || SQLERRM);
|
|
||||||
END IF;
|
|
||||||
END;
|
|
||||||
END LOOP;
|
|
||||||
|
|
||||||
DBMS_OUTPUT.PUT_LINE('');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
|
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
|
||||||
DBMS_OUTPUT.PUT_LINE('Checking HIST Bucket (should be empty)');
|
DBMS_OUTPUT.PUT_LINE('Checking HIST Bucket (should be empty)');
|
||||||
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
|
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
|
||||||
@@ -139,24 +89,21 @@ BEGIN
|
|||||||
END;
|
END;
|
||||||
END LOOP;
|
END LOOP;
|
||||||
|
|
||||||
vTotalFiles := vDataFileCount + vHistFileCount;
|
|
||||||
|
|
||||||
DBMS_OUTPUT.PUT_LINE('');
|
DBMS_OUTPUT.PUT_LINE('');
|
||||||
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
|
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
|
||||||
DBMS_OUTPUT.PUT_LINE('Rollback Verification Summary');
|
DBMS_OUTPUT.PUT_LINE('Rollback Verification Summary');
|
||||||
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
|
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
|
||||||
DBMS_OUTPUT.PUT_LINE('DATA bucket files remaining: ' || vDataFileCount);
|
|
||||||
DBMS_OUTPUT.PUT_LINE('HIST bucket files remaining: ' || vHistFileCount || '+');
|
DBMS_OUTPUT.PUT_LINE('HIST bucket files remaining: ' || vHistFileCount || '+');
|
||||||
DBMS_OUTPUT.PUT_LINE('Total files found: ' || vTotalFiles || '+');
|
DBMS_OUTPUT.PUT_LINE('');
|
||||||
DBMS_OUTPUT.PUT_LINE('');
|
DBMS_OUTPUT.PUT_LINE('');
|
||||||
|
|
||||||
IF vTotalFiles = 0 THEN
|
IF vHistFileCount = 0 THEN
|
||||||
DBMS_OUTPUT.PUT_LINE('[PASSED] ROLLBACK VERIFICATION PASSED');
|
DBMS_OUTPUT.PUT_LINE('[PASSED] ROLLBACK VERIFICATION PASSED');
|
||||||
DBMS_OUTPUT.PUT_LINE(' All CSDB export files have been deleted or were not created');
|
DBMS_OUTPUT.PUT_LINE(' All CSDB export files have been deleted or were not created');
|
||||||
DBMS_OUTPUT.PUT_LINE(' Buckets are clean and ready for re-export if needed');
|
DBMS_OUTPUT.PUT_LINE(' HIST bucket is clean and ready for re-export if needed');
|
||||||
ELSE
|
ELSE
|
||||||
DBMS_OUTPUT.PUT_LINE('[INFO] ROLLBACK VERIFICATION COMPLETED');
|
DBMS_OUTPUT.PUT_LINE('[INFO] ROLLBACK VERIFICATION COMPLETED');
|
||||||
DBMS_OUTPUT.PUT_LINE(' Found ' || vTotalFiles || '+ file(s) remaining in buckets');
|
DBMS_OUTPUT.PUT_LINE(' Found ' || vHistFileCount || '+ file(s) remaining in HIST bucket');
|
||||||
DBMS_OUTPUT.PUT_LINE(' NOTE: These may be pre-existing files from before installation.');
|
DBMS_OUTPUT.PUT_LINE(' NOTE: These may be pre-existing files from before installation.');
|
||||||
DBMS_OUTPUT.PUT_LINE(' Rollback only deletes files created during this export operation.');
|
DBMS_OUTPUT.PUT_LINE(' Rollback only deletes files created during this export operation.');
|
||||||
DBMS_OUTPUT.PUT_LINE(' If needed, manually verify and clean up remaining files.');
|
DBMS_OUTPUT.PUT_LINE(' If needed, manually verify and clean up remaining files.');
|
||||||
|
|||||||
@@ -1,165 +0,0 @@
|
|||||||
# MARS-835: One-Time CSDB Data Export from Operational Database to External Tables
|
|
||||||
|
|
||||||
## Overview
|
|
||||||
This package performs a one-time bulk export of CSDB data from operational database tables (OU_CSDB schema) to new external tables in OCI buckets. The export uses DATA_EXPORTER v2.4.0 with per-column date format handling to move historical data to either DATA bucket (CSV format) or HIST bucket (Parquet format with Hive-style partitioning).
|
|
||||||
|
|
||||||
**Migration Strategy:**
|
|
||||||
- **Split Export (2 tables)**: DEBT, DEBT_DAILY - Last 6 months → DATA (CSV), Older data → HIST (Parquet)
|
|
||||||
- **HIST Only (4 tables)**: INSTR_RAT_FULL, INSTR_DESC_FULL, ISSUER_RAT_FULL, ISSUER_DESC_FULL - All data → HIST (Parquet)
|
|
||||||
|
|
||||||
**Key Transformations:**
|
|
||||||
- Column rename: `A_ETL_LOAD_SET_FK` → `A_WORKFLOW_HISTORY_KEY` (all tables)
|
|
||||||
- Column removal: DEBT (2 columns), DEBT_DAILY (6 columns) not required in new structure
|
|
||||||
|
|
||||||
## Contents
|
|
||||||
- `install_mars835.sql` - Master installation script with SPOOL logging
|
|
||||||
- `rollback_mars835.sql` - Master rollback script
|
|
||||||
- `01_MARS_835_*.sql` - Individual installation scripts
|
|
||||||
- `91_MARS_835_*.sql` - Individual rollback scripts
|
|
||||||
- `track_package_versions.sql` - Package version tracking
|
|
||||||
- `verify_packages_version.sql` - Package verification
|
|
||||||
|
|
||||||
## Prerequisites
|
|
||||||
- Oracle Database 23ai
|
|
||||||
- ADMIN user access (required for all MARS installations)
|
|
||||||
- ENV_MANAGER v3.1.0+
|
|
||||||
- Required schema privileges
|
|
||||||
|
|
||||||
## Installation
|
|
||||||
|
|
||||||
### Option 1: Master Script (Recommended)
|
|
||||||
```powershell
|
|
||||||
# IMPORTANT: Execute as ADMIN user for proper privilege management
|
|
||||||
Get-Content "MARS_Packages/REL01_POST_DEACTIVATION/MARS-835/install_mars835.sql" | sql "ADMIN/Cloudpass#34@ggmichalski_high"
|
|
||||||
|
|
||||||
# Log file created: log/INSTALL_MARS_835_<PDB>_<timestamp>.log
|
|
||||||
```
|
|
||||||
|
|
||||||
### Option 2: Individual Scripts
|
|
||||||
```powershell
|
|
||||||
# IMPORTANT: Execute as ADMIN user
|
|
||||||
Get-Content "01_MARS_835_*.sql" | sql "ADMIN/Cloudpass#34@ggmichalski_high"
|
|
||||||
Get-Content "02_MARS_835_*.sql" | sql "ADMIN/Cloudpass#34@ggmichalski_high"
|
|
||||||
# ... etc
|
|
||||||
```
|
|
||||||
|
|
||||||
## Verification
|
|
||||||
```sql
|
|
||||||
-- Verify package versions
|
|
||||||
SELECT PACKAGE_NAME.GET_VERSION() FROM DUAL;
|
|
||||||
|
|
||||||
-- Check for errors (ADMIN user checks specific schema)
|
|
||||||
SELECT * FROM ALL_ERRORS
|
|
||||||
WHERE OWNER = 'CT_MRDS' -- Replace with target schema
|
|
||||||
AND NAME = 'PACKAGE_NAME';
|
|
||||||
|
|
||||||
-- Verify no untracked changes
|
|
||||||
SELECT ENV_MANAGER.CHECK_PACKAGE_CHANGES('CT_MRDS', 'PACKAGE_NAME') FROM DUAL;
|
|
||||||
```
|
|
||||||
|
|
||||||
## Rollback
|
|
||||||
```powershell
|
|
||||||
# IMPORTANT: Execute as ADMIN user
|
|
||||||
Get-Content "MARS_Packages/REL01_POST_DEACTIVATION/MARS-835/rollback_mars835.sql" | sql "ADMIN/Cloudpass#34@ggmichalski_high"
|
|
||||||
|
|
||||||
**NOTE**: Rollback for data exports is **NOT RECOMMENDED** as it would delete exported files from OCI buckets. Only use rollback if export failed and needs to be restarted.
|
|
||||||
```
|
|
||||||
|
|
||||||
## Expected Changes
|
|
||||||
|
|
||||||
### Data Export Summary
|
|
||||||
**6 CSDB tables exported from OU_CSDB schema:**
|
|
||||||
|
|
||||||
**Group 1: Split DATA + HIST (Time Critical)**
|
|
||||||
1. **DEBT** - Last 6 months → DATA, Older → HIST
|
|
||||||
2. **DEBT_DAILY** - Last 6 months → DATA, Older → HIST
|
|
||||||
|
|
||||||
**Group 2: HIST Only (Weekend Bulk)**
|
|
||||||
3. **INSTR_RAT_FULL** - All data → HIST
|
|
||||||
4. **INSTR_DESC_FULL** - All data → HIST
|
|
||||||
5. **ISSUER_RAT_FULL** - All data → HIST
|
|
||||||
6. **ISSUER_DESC_FULL** - All data → HIST
|
|
||||||
|
|
||||||
### Bucket Destinations (DEV environment)
|
|
||||||
- **DATA Bucket**: `mrds_data_dev/ODS/CSDB/` (CSV format)
|
|
||||||
- **HIST Bucket**: `mrds_hist_dev/ARCHIVE/CSDB/` (Parquet with partitioning)
|
|
||||||
|
|
||||||
### Column Mappings
|
|
||||||
- **All tables**: `A_ETL_LOAD_SET_FK` renamed to `A_WORKFLOW_HISTORY_KEY`
|
|
||||||
- **DEBT**: Removed columns: `IDIRDEPOSITORY`, `VA_BONDDURATION`
|
|
||||||
- **DEBT_DAILY**: Removed columns: `STEPID`, `PROGRAMNAME`, `PROGRAMCEILING`, `PROGRAMSTATUS`, `ISSUERNACE21SECTOR`, `INSTRUMENTQUOTATIONBASIS`
|
|
||||||
|
|
||||||
## Testing
|
|
||||||
|
|
||||||
### Post-Export Verification
|
|
||||||
|
|
||||||
1. **Verify CSV files in DATA bucket** (DEBT, DEBT_DAILY - last 6 months):
|
|
||||||
```sql
|
|
||||||
-- Check exported files
|
|
||||||
SELECT object_name, bytes
|
|
||||||
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
|
||||||
credential_name => 'DEF_CRED_ARN',
|
|
||||||
location_uri => 'https://objectstorage.region.oraclecloud.com/n/namespace/b/mrds_data_dev/o/ODS/CSDB/'
|
|
||||||
)) WHERE object_name LIKE '%CSDB_DEBT%';
|
|
||||||
```
|
|
||||||
|
|
||||||
2. **Verify Parquet files in HIST bucket** (all 6 tables):
|
|
||||||
```sql
|
|
||||||
-- Check archived files with Hive partitioning
|
|
||||||
SELECT object_name, bytes
|
|
||||||
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
|
||||||
credential_name => 'DEF_CRED_ARN',
|
|
||||||
location_uri => 'https://objectstorage.region.oraclecloud.com/n/namespace/b/mrds_hist_dev/o/ARCHIVE/CSDB/'
|
|
||||||
)) WHERE object_name LIKE '%PARTITION_YEAR=%';
|
|
||||||
```
|
|
||||||
|
|
||||||
3. **Validate row counts match source tables**:
|
|
||||||
```sql
|
|
||||||
-- Compare counts between source and exported data
|
|
||||||
SELECT COUNT(*) FROM OU_CSDB.DEBT;
|
|
||||||
SELECT COUNT(*) FROM ODS.CSDB_DEBT_ODS; -- External table pointing to DATA
|
|
||||||
SELECT COUNT(*) FROM ODS.CSDB_DEBT_ARCHIVE; -- External table pointing to HIST
|
|
||||||
```
|
|
||||||
|
|
||||||
4. **Verify column mappings**:
|
|
||||||
```sql
|
|
||||||
-- Check A_WORKFLOW_HISTORY_KEY exists in exported data
|
|
||||||
SELECT A_WORKFLOW_HISTORY_KEY, COUNT(*)
|
|
||||||
FROM ODS.CSDB_DEBT_ARCHIVE
|
|
||||||
GROUP BY A_WORKFLOW_HISTORY_KEY;
|
|
||||||
```
|
|
||||||
|
|
||||||
## Known Issues
|
|
||||||
|
|
||||||
### Timing Constraints
|
|
||||||
- **DATA exports (DEBT, DEBT_DAILY)**: Must execute during parallel old+new loads phase after Production deployment
|
|
||||||
- **HIST exports (all 6 tables)**: Can run anytime, recommended for weekend bulk execution to avoid interference
|
|
||||||
|
|
||||||
### Environment-Specific Configuration
|
|
||||||
- Bucket names must be adjusted for each environment:
|
|
||||||
- DEV: `mrds_data_dev`, `mrds_hist_dev`
|
|
||||||
- TEST: `mrds_data_test`, `mrds_hist_test`
|
|
||||||
- PROD: `mrds_data_prod`, `mrds_hist_prod`
|
|
||||||
|
|
||||||
### Data Cutoff Date
|
|
||||||
- Export scripts use 6-month cutoff date calculated as `ADD_MONTHS(SYSDATE, -6)`
|
|
||||||
- Verify cutoff aligns with business requirements before execution
|
|
||||||
|
|
||||||
### One-Time Execution
|
|
||||||
- This is a **ONE-TIME data migration** package
|
|
||||||
- After successful execution, package should be **deactivated** (moved to REL01_POST_DEACTIVATION)
|
|
||||||
- Do not re-run unless explicitly required for data refresh
|
|
||||||
|
|
||||||
## Related
|
|
||||||
- **JIRA**: MARS-835 - CSDB Data Export to External Tables
|
|
||||||
- **Confluence**: FILE_MANAGER package - MRDS - Technical Team
|
|
||||||
- **Confluence**: Table Setup Guide for FILE PROCESSOR System
|
|
||||||
- **Source Schema**: OU_CSDB (Operational Database)
|
|
||||||
- **Target Schema**: ODS (External Tables)
|
|
||||||
- **Migration Type**: One-time bulk export (deactivated post-execution)
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
**Author:** Grzegorz Michalski
|
|
||||||
**Date:** 2025-12-04
|
|
||||||
**Version:** 1.0.0
|
|
||||||
@@ -1,207 +0,0 @@
|
|||||||
# MARS-835: Required External Tables for Smart Column Mapping
|
|
||||||
|
|
||||||
## Overview
|
|
||||||
This document lists all external tables required for MARS-835 data exports using DATA_EXPORTER v2.4.0 with Smart Column Mapping feature.
|
|
||||||
|
|
||||||
**Purpose**: Smart Column Mapping ensures CSV files are generated with columns in the EXACT order expected by external tables, preventing NULL values due to Oracle's positional CSV mapping.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Required External Tables
|
|
||||||
|
|
||||||
### Group 1: DATA Bucket (CSV Format) - **CRITICAL**
|
|
||||||
|
|
||||||
#### 1. ODS.CSDB_DEBT_DATA_ODS
|
|
||||||
- **Source Table**: OU_CSDB.LEGACY_DEBT
|
|
||||||
- **Format**: CSV
|
|
||||||
- **Bucket**: DATA (mrds_data_dev/ODS/CSDB/CSDB_DEBT/)
|
|
||||||
- **Key Column Mapping**: A_ETL_LOAD_SET_FK → A_WORKFLOW_HISTORY_KEY (position 2 recommended)
|
|
||||||
- **Critical**: Must use Smart Column Mapping to avoid NULL values in A_WORKFLOW_HISTORY_KEY
|
|
||||||
|
|
||||||
#### 2. ODS.CSDB_DEBT_DAILY_DATA_ODS
|
|
||||||
- **Source Table**: OU_CSDB.LEGACY_DEBT_DAILY
|
|
||||||
- **Format**: CSV
|
|
||||||
- **Bucket**: DATA (mrds_data_dev/ODS/CSDB/CSDB_DEBT_DAILY/)
|
|
||||||
- **Key Column Mapping**: A_ETL_LOAD_SET_FK → A_WORKFLOW_HISTORY_KEY (position 2 recommended)
|
|
||||||
- **Critical**: Must use Smart Column Mapping to avoid NULL values in A_WORKFLOW_HISTORY_KEY
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### Group 2: ARCHIVE Bucket (Parquet Format) - **RECOMMENDED**
|
|
||||||
|
|
||||||
#### 3. ODS.CSDB_DEBT_ARCHIVE
|
|
||||||
- **Source Table**: OU_CSDB.LEGACY_DEBT
|
|
||||||
- **Format**: Parquet with Hive partitioning
|
|
||||||
- **Bucket**: ARCHIVE (mrds_hist_dev/ARCHIVE/CSDB/CSDB_DEBT/)
|
|
||||||
- **Key Column Mapping**: A_ETL_LOAD_SET_FK → A_WORKFLOW_HISTORY_KEY
|
|
||||||
- **Note**: Parquet uses schema-based mapping (column order less critical but Smart Column Mapping ensures consistency)
|
|
||||||
|
|
||||||
#### 4. ODS.CSDB_DEBT_DAILY_ARCHIVE
|
|
||||||
- **Source Table**: OU_CSDB.LEGACY_DEBT_DAILY
|
|
||||||
- **Format**: Parquet with Hive partitioning
|
|
||||||
- **Bucket**: ARCHIVE (mrds_hist_dev/ARCHIVE/CSDB/CSDB_DEBT_DAILY/)
|
|
||||||
- **Key Column Mapping**: A_ETL_LOAD_SET_FK → A_WORKFLOW_HISTORY_KEY
|
|
||||||
|
|
||||||
#### 5. ODS.CSDB_INSTR_RAT_FULL_ARCHIVE
|
|
||||||
- **Source Table**: OU_CSDB.LEGACY_INSTR_RAT_FULL
|
|
||||||
- **Format**: Parquet with Hive partitioning
|
|
||||||
- **Bucket**: ARCHIVE (mrds_hist_dev/ARCHIVE/CSDB/CSDB_INSTR_RAT_FULL/)
|
|
||||||
- **Key Column Mapping**: A_ETL_LOAD_SET_FK → A_WORKFLOW_HISTORY_KEY
|
|
||||||
|
|
||||||
#### 6. ODS.CSDB_INSTR_DESC_FULL_ARCHIVE
|
|
||||||
- **Source Table**: OU_CSDB.LEGACY_INSTR_DESC_FULL
|
|
||||||
- **Format**: Parquet with Hive partitioning
|
|
||||||
- **Bucket**: ARCHIVE (mrds_hist_dev/ARCHIVE/CSDB/CSDB_INSTR_DESC_FULL/)
|
|
||||||
- **Key Column Mapping**: A_ETL_LOAD_SET_FK → A_WORKFLOW_HISTORY_KEY
|
|
||||||
|
|
||||||
#### 7. ODS.CSDB_ISSUER_RAT_FULL_ARCHIVE
|
|
||||||
- **Source Table**: OU_CSDB.LEGACY_ISSUER_RAT_FULL
|
|
||||||
- **Format**: Parquet with Hive partitioning
|
|
||||||
- **Bucket**: ARCHIVE (mrds_hist_dev/ARCHIVE/CSDB/CSDB_ISSUER_RAT_FULL/)
|
|
||||||
- **Key Column Mapping**: A_ETL_LOAD_SET_FK → A_WORKFLOW_HISTORY_KEY
|
|
||||||
|
|
||||||
#### 8. ODS.CSDB_ISSUER_DESC_FULL_ARCHIVE
|
|
||||||
- **Source Table**: OU_CSDB.LEGACY_ISSUER_DESC_FULL
|
|
||||||
- **Format**: Parquet with Hive partitioning
|
|
||||||
- **Bucket**: ARCHIVE (mrds_hist_dev/ARCHIVE/CSDB/CSDB_ISSUER_DESC_FULL/)
|
|
||||||
- **Key Column Mapping**: A_ETL_LOAD_SET_FK → A_WORKFLOW_HISTORY_KEY
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## External Table Column Order Requirements
|
|
||||||
|
|
||||||
### **CRITICAL for CSV Tables** (DATA bucket):
|
|
||||||
|
|
||||||
All CSV external tables MUST have **A_WORKFLOW_HISTORY_KEY at position 2**:
|
|
||||||
|
|
||||||
```
|
|
||||||
Position 1: A_KEY (NUMBER)
|
|
||||||
Position 2: A_WORKFLOW_HISTORY_KEY (NUMBER) ← MUST BE HERE!
|
|
||||||
Position 3+: Other columns in any order
|
|
||||||
```
|
|
||||||
|
|
||||||
**Reason**: Oracle External Tables with CSV format use **positional mapping** (ignore header row). If source table has A_ETL_LOAD_SET_FK at position 72, but CSV puts it at position 72 while external table expects A_WORKFLOW_HISTORY_KEY at position 2, the external table will try to read position 2 (which might be a DATE column) as NUMBER → conversion fails → NULL value.
|
|
||||||
|
|
||||||
**Solution**: Smart Column Mapping (v2.4.0) generates CSV columns in EXTERNAL TABLE order, ensuring position 2 has the correct NUMBER value.
|
|
||||||
|
|
||||||
### **OPTIONAL for Parquet Tables** (ARCHIVE bucket):
|
|
||||||
|
|
||||||
Parquet format uses **schema-based mapping** (column names). Column order doesn't matter, but Smart Column Mapping provides consistency.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Creation Script Example
|
|
||||||
|
|
||||||
### CSV External Table (CRITICAL - Correct Column Order)
|
|
||||||
|
|
||||||
```sql
|
|
||||||
-- Example: ODS.CSDB_DEBT_DATA_ODS
|
|
||||||
-- IMPORTANT: A_WORKFLOW_HISTORY_KEY must be at position 2!
|
|
||||||
|
|
||||||
BEGIN
|
|
||||||
ODS.FILE_MANAGER_ODS.CREATE_EXTERNAL_TABLE(
|
|
||||||
pTableName => 'CSDB_DEBT_DATA_ODS',
|
|
||||||
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_DEBT_TEMPLATE',
|
|
||||||
pPrefix => 'ODS/CSDB/CSDB_DEBT',
|
|
||||||
pBucketUri => CT_MRDS.ENV_MANAGER.gvDataBucketUri,
|
|
||||||
pFormat => 'CSV' -- Uses positional mapping!
|
|
||||||
);
|
|
||||||
END;
|
|
||||||
/
|
|
||||||
|
|
||||||
-- Verify column order (A_WORKFLOW_HISTORY_KEY should be position 2)
|
|
||||||
SELECT column_id, column_name, data_type
|
|
||||||
FROM all_tab_columns
|
|
||||||
WHERE table_name = 'CSDB_DEBT_DATA_ODS'
|
|
||||||
AND owner = 'ODS'
|
|
||||||
ORDER BY column_id;
|
|
||||||
```
|
|
||||||
|
|
||||||
### Parquet External Table (Optional Column Order)
|
|
||||||
|
|
||||||
```sql
|
|
||||||
-- Example: ODS.CSDB_DEBT_ARCHIVE
|
|
||||||
-- Column order flexible (schema-based mapping)
|
|
||||||
|
|
||||||
BEGIN
|
|
||||||
ODS.FILE_MANAGER_ODS.CREATE_EXTERNAL_TABLE(
|
|
||||||
pTableName => 'CSDB_DEBT_ARCHIVE',
|
|
||||||
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_DEBT_TEMPLATE',
|
|
||||||
pPrefix => 'ARCHIVE/CSDB/CSDB_DEBT',
|
|
||||||
pBucketUri => CT_MRDS.ENV_MANAGER.gvArchiveBucketUri,
|
|
||||||
pFormat => 'PARQUET' -- Uses schema-based mapping
|
|
||||||
);
|
|
||||||
END;
|
|
||||||
/
|
|
||||||
```
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Template Tables Required
|
|
||||||
|
|
||||||
All external tables require corresponding template tables in CT_ET_TEMPLATES schema:
|
|
||||||
|
|
||||||
- `CT_ET_TEMPLATES.CSDB_DEBT_TEMPLATE`
|
|
||||||
- `CT_ET_TEMPLATES.CSDB_DEBT_DAILY_TEMPLATE`
|
|
||||||
- `CT_ET_TEMPLATES.CSDB_INSTR_RAT_FULL_TEMPLATE`
|
|
||||||
- `CT_ET_TEMPLATES.CSDB_INSTR_DESC_FULL_TEMPLATE`
|
|
||||||
- `CT_ET_TEMPLATES.CSDB_ISSUER_RAT_FULL_TEMPLATE`
|
|
||||||
- `CT_ET_TEMPLATES.CSDB_ISSUER_DESC_FULL_TEMPLATE`
|
|
||||||
|
|
||||||
**Note**: Template tables must be created by ADMIN or CT_ET_TEMPLATES user (MRDS_LOADER cannot create them).
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Verification Checklist
|
|
||||||
|
|
||||||
Before running MARS-835 exports:
|
|
||||||
|
|
||||||
- [ ] All 8 external tables exist in ODS schema
|
|
||||||
- [ ] CSV tables (DATA bucket) have A_WORKFLOW_HISTORY_KEY at position 2
|
|
||||||
- [ ] Template tables exist in CT_ET_TEMPLATES schema
|
|
||||||
- [ ] MRDS_LOADER has EXECUTE privilege on ODS.FILE_MANAGER_ODS
|
|
||||||
- [ ] ODS schema has access to CT_MRDS.ENV_MANAGER for logging
|
|
||||||
- [ ] DATA_EXPORTER v2.4.0 deployed with Smart Column Mapping feature
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Testing Verification
|
|
||||||
|
|
||||||
After export, verify A_WORKFLOW_HISTORY_KEY is not NULL:
|
|
||||||
|
|
||||||
```sql
|
|
||||||
-- CSV tables (should be 100% populated)
|
|
||||||
SELECT 'CSDB_DEBT_DATA_ODS' AS TABLE_NAME,
|
|
||||||
COUNT(*) AS TOTAL_ROWS,
|
|
||||||
COUNT(A_WORKFLOW_HISTORY_KEY) AS NON_NULL_COUNT,
|
|
||||||
ROUND(COUNT(A_WORKFLOW_HISTORY_KEY) * 100.0 / NULLIF(COUNT(*), 0), 2) AS SUCCESS_RATE_PCT
|
|
||||||
FROM ODS.CSDB_DEBT_DATA_ODS;
|
|
||||||
|
|
||||||
SELECT 'CSDB_DEBT_DAILY_DATA_ODS' AS TABLE_NAME,
|
|
||||||
COUNT(*) AS TOTAL_ROWS,
|
|
||||||
COUNT(A_WORKFLOW_HISTORY_KEY) AS NON_NULL_COUNT,
|
|
||||||
ROUND(COUNT(A_WORKFLOW_HISTORY_KEY) * 100.0 / NULLIF(COUNT(*), 0), 2) AS SUCCESS_RATE_PCT
|
|
||||||
FROM ODS.CSDB_DEBT_DAILY_DATA_ODS;
|
|
||||||
|
|
||||||
-- Parquet tables (should also be 100% populated)
|
|
||||||
SELECT 'CSDB_DEBT_ARCHIVE' AS TABLE_NAME,
|
|
||||||
COUNT(*) AS TOTAL_ROWS,
|
|
||||||
COUNT(A_WORKFLOW_HISTORY_KEY) AS NON_NULL_COUNT,
|
|
||||||
ROUND(COUNT(A_WORKFLOW_HISTORY_KEY) * 100.0 / NULLIF(COUNT(*), 0), 2) AS SUCCESS_RATE_PCT
|
|
||||||
FROM ODS.CSDB_DEBT_ARCHIVE;
|
|
||||||
```
|
|
||||||
|
|
||||||
**Expected Result**: SUCCESS_RATE_PCT = 100.00 for all tables
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Related Documentation
|
|
||||||
|
|
||||||
- [DATA_EXPORTER v2.4.0 Smart Column Mapping Examples](../MARS-835-PREHOOK/current_version/v2.3.0/DATA_EXPORTER_v2.4.0_Smart_Column_Mapping_Examples.sql)
|
|
||||||
- [Oracle External Tables Column Order Issue](../../confluence/additions/Oracle_External_Tables_Column_Order_Issue.md)
|
|
||||||
- [MARS-835 README](README.md)
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
**Last Updated**: 2026-01-09
|
|
||||||
**Author**: GitHub Copilot (MARS-835 Update)
|
|
||||||
@@ -59,7 +59,13 @@ PROMPT =========================================================================
|
|||||||
|
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT =========================================================================
|
PROMPT =========================================================================
|
||||||
PROMPT Step 3: Verify Rollback Completed
|
PROMPT Step 3: Delete File Registration Records from A_SOURCE_FILE_RECEIVED
|
||||||
|
PROMPT =========================================================================
|
||||||
|
@@90_MARS_835_rollback_file_registrations.sql
|
||||||
|
|
||||||
|
PROMPT
|
||||||
|
PROMPT =========================================================================
|
||||||
|
PROMPT Step 4: Verify Rollback Completed
|
||||||
PROMPT =========================================================================
|
PROMPT =========================================================================
|
||||||
@@99_MARS_835_verify_rollback.sql
|
@@99_MARS_835_verify_rollback.sql
|
||||||
|
|
||||||
|
|||||||
@@ -1,92 +0,0 @@
|
|||||||
-- ===================================================================
|
|
||||||
-- Simple Package Version Tracking Script
|
|
||||||
-- ===================================================================
|
|
||||||
-- Purpose: Track specified Oracle package versions
|
|
||||||
-- Author: Grzegorz Michalski
|
|
||||||
-- Date: 2025-12-04
|
|
||||||
-- Version: 3.1.0 - List-Based Edition
|
|
||||||
--
|
|
||||||
-- USAGE:
|
|
||||||
-- 1. Edit package list below (add/remove packages as needed)
|
|
||||||
-- 2. Include in your install/rollback script: @@track_package_versions.sql
|
|
||||||
-- ===================================================================
|
|
||||||
|
|
||||||
SET SERVEROUTPUT ON;
|
|
||||||
|
|
||||||
DECLARE
|
|
||||||
TYPE t_package_rec IS RECORD (
|
|
||||||
owner VARCHAR2(50),
|
|
||||||
name VARCHAR2(50),
|
|
||||||
version VARCHAR2(50)
|
|
||||||
);
|
|
||||||
TYPE t_packages IS TABLE OF t_package_rec;
|
|
||||||
TYPE t_string_array IS TABLE OF VARCHAR2(100);
|
|
||||||
|
|
||||||
-- ===================================================================
|
|
||||||
-- PACKAGE LIST - Edit this array to specify packages to track
|
|
||||||
-- ===================================================================
|
|
||||||
-- Add or remove entries as needed for your MARS issue
|
|
||||||
-- Format: 'SCHEMA.PACKAGE_NAME'
|
|
||||||
-- ===================================================================
|
|
||||||
vPackageList t_string_array := t_string_array(
|
|
||||||
'CT_MRDS.FILE_MANAGER',
|
|
||||||
'ODS.FILE_MANAGER_ODS'
|
|
||||||
);
|
|
||||||
-- ===================================================================
|
|
||||||
|
|
||||||
vPackages t_packages := t_packages();
|
|
||||||
vVersion VARCHAR2(50);
|
|
||||||
vCount NUMBER := 0;
|
|
||||||
vOwner VARCHAR2(50);
|
|
||||||
vPackageName VARCHAR2(50);
|
|
||||||
vDotPos NUMBER;
|
|
||||||
BEGIN
|
|
||||||
DBMS_OUTPUT.PUT_LINE('========================================');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('Package Version Tracking');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('========================================');
|
|
||||||
|
|
||||||
-- Process each package in the list
|
|
||||||
FOR i IN 1..vPackageList.COUNT LOOP
|
|
||||||
vDotPos := INSTR(vPackageList(i), '.');
|
|
||||||
IF vDotPos > 0 THEN
|
|
||||||
vOwner := SUBSTR(vPackageList(i), 1, vDotPos - 1);
|
|
||||||
vPackageName := SUBSTR(vPackageList(i), vDotPos + 1);
|
|
||||||
|
|
||||||
BEGIN
|
|
||||||
EXECUTE IMMEDIATE 'SELECT ' || vPackageList(i) || '.GET_VERSION() FROM DUAL'
|
|
||||||
INTO vVersion;
|
|
||||||
|
|
||||||
vPackages.EXTEND;
|
|
||||||
vPackages(vPackages.COUNT).owner := vOwner;
|
|
||||||
vPackages(vPackages.COUNT).name := vPackageName;
|
|
||||||
vPackages(vPackages.COUNT).version := vVersion;
|
|
||||||
|
|
||||||
CT_MRDS.ENV_MANAGER.TRACK_PACKAGE_VERSION(
|
|
||||||
pPackageOwner => vOwner,
|
|
||||||
pPackageName => vPackageName,
|
|
||||||
pPackageVersion => vVersion,
|
|
||||||
pPackageBuildDate => TO_CHAR(SYSDATE, 'YYYY-MM-DD HH24:MI:SS'),
|
|
||||||
pPackageAuthor => 'Grzegorz Michalski'
|
|
||||||
);
|
|
||||||
vCount := vCount + 1;
|
|
||||||
EXCEPTION
|
|
||||||
WHEN OTHERS THEN
|
|
||||||
DBMS_OUTPUT.PUT_LINE('Error tracking ' || vPackageList(i) || ': ' || SQLERRM);
|
|
||||||
END;
|
|
||||||
END IF;
|
|
||||||
END LOOP;
|
|
||||||
|
|
||||||
-- Display results
|
|
||||||
IF vPackages.COUNT > 0 THEN
|
|
||||||
DBMS_OUTPUT.PUT_LINE('Packages tracked: ' || vCount || ' of ' || vPackages.COUNT);
|
|
||||||
FOR i IN 1..vPackages.COUNT LOOP
|
|
||||||
DBMS_OUTPUT.PUT_LINE(' ' || vPackages(i).owner || '.' || vPackages(i).name ||
|
|
||||||
' (v' || vPackages(i).version || ')');
|
|
||||||
END LOOP;
|
|
||||||
ELSE
|
|
||||||
DBMS_OUTPUT.PUT_LINE('No packages found in list');
|
|
||||||
END IF;
|
|
||||||
|
|
||||||
DBMS_OUTPUT.PUT_LINE('========================================');
|
|
||||||
END;
|
|
||||||
/
|
|
||||||
@@ -1,62 +0,0 @@
|
|||||||
-- ===================================================================
|
|
||||||
-- Universal Package Version Verification Script
|
|
||||||
-- ===================================================================
|
|
||||||
-- Purpose: Verify all tracked Oracle packages for code changes
|
|
||||||
-- Author: Grzegorz Michalski
|
|
||||||
-- Date: 2025-12-04
|
|
||||||
-- Version: 1.0.0
|
|
||||||
--
|
|
||||||
-- USAGE:
|
|
||||||
-- Include at the end of install/rollback scripts: @@verify_packages_version.sql
|
|
||||||
--
|
|
||||||
-- OUTPUT:
|
|
||||||
-- - List of all tracked packages with their current status
|
|
||||||
-- - OK: Package has not changed since last tracking
|
|
||||||
-- - WARNING: Package code changed without version update
|
|
||||||
-- ===================================================================
|
|
||||||
|
|
||||||
SET LINESIZE 200
|
|
||||||
SET PAGESIZE 1000
|
|
||||||
SET FEEDBACK OFF
|
|
||||||
|
|
||||||
PROMPT
|
|
||||||
PROMPT ========================================
|
|
||||||
PROMPT Package Version Verification
|
|
||||||
PROMPT ========================================
|
|
||||||
PROMPT
|
|
||||||
|
|
||||||
COLUMN PACKAGE_OWNER FORMAT A15
|
|
||||||
COLUMN PACKAGE_NAME FORMAT A20
|
|
||||||
COLUMN VERSION FORMAT A10
|
|
||||||
COLUMN STATUS FORMAT A80
|
|
||||||
|
|
||||||
SELECT
|
|
||||||
PACKAGE_OWNER,
|
|
||||||
PACKAGE_NAME,
|
|
||||||
PACKAGE_VERSION AS VERSION,
|
|
||||||
CT_MRDS.ENV_MANAGER.CHECK_PACKAGE_CHANGES(PACKAGE_OWNER, PACKAGE_NAME) AS STATUS
|
|
||||||
FROM (
|
|
||||||
SELECT
|
|
||||||
PACKAGE_OWNER,
|
|
||||||
PACKAGE_NAME,
|
|
||||||
PACKAGE_VERSION,
|
|
||||||
ROW_NUMBER() OVER (PARTITION BY PACKAGE_OWNER, PACKAGE_NAME ORDER BY TRACKING_DATE DESC) AS RN
|
|
||||||
FROM CT_MRDS.A_PACKAGE_VERSION_TRACKING
|
|
||||||
)
|
|
||||||
WHERE RN = 1
|
|
||||||
ORDER BY PACKAGE_OWNER, PACKAGE_NAME;
|
|
||||||
|
|
||||||
PROMPT
|
|
||||||
PROMPT ========================================
|
|
||||||
PROMPT Verification Complete
|
|
||||||
PROMPT ========================================
|
|
||||||
PROMPT
|
|
||||||
PROMPT Legend:
|
|
||||||
PROMPT OK - Package has not changed since last tracking
|
|
||||||
PROMPT WARNING - Package code changed without version update
|
|
||||||
PROMPT
|
|
||||||
PROMPT For detailed hash information, use:
|
|
||||||
PROMPT SELECT ENV_MANAGER.GET_PACKAGE_HASH_INFO('OWNER', 'PACKAGE') FROM DUAL;
|
|
||||||
PROMPT ========================================
|
|
||||||
|
|
||||||
SET FEEDBACK ON
|
|
||||||
@@ -1,26 +0,0 @@
|
|||||||
GRANT SELECT, INSERT, UPDATE, DELETE ON ct_ods.a_load_history TO ct_mrds;
|
|
||||||
|
|
||||||
create or replace TRIGGER ct_mrds.a_workflow_history
|
|
||||||
AFTER INSERT OR UPDATE OF workflow_successful ON ct_mrds.a_workflow_history
|
|
||||||
REFERENCING NEW AS new OLD AS old
|
|
||||||
FOR EACH ROW
|
|
||||||
DECLARE
|
|
||||||
v_workflow_name VARCHAR2(128);
|
|
||||||
BEGIN
|
|
||||||
IF :new.workflow_name IN ('w_ODS_LM_STANDING_FACILITIES', 'w_ODS_CSDB_DEBT', 'w_ODS_CSDB_DEBT_DAILY', 'w_ODS_CSDB_RATINGS_FULL') AND :new.service_name = 'ODS' THEN
|
|
||||||
IF :new.workflow_successful <> :old.workflow_successful AND :new.workflow_successful = 'Y' THEN
|
|
||||||
IF :new.workflow_name = 'w_ODS_LM_STANDING_FACILITIES' THEN
|
|
||||||
v_workflow_name := 'w_ODS_LM_STANDING_FACILITY';
|
|
||||||
ELSE
|
|
||||||
v_workflow_name := :new.workflow_name;
|
|
||||||
END IF;
|
|
||||||
INSERT INTO ct_ods.a_load_history (
|
|
||||||
a_etl_load_set_key, workflow_name, infa_run_id, load_start, load_end, exdi_appl_req_id, exdi_correlation_id, load_successful, wla_run_id, dq_flag
|
|
||||||
) VALUES (
|
|
||||||
:new.a_workflow_history_key, v_workflow_name, NULL, :new.workflow_start, :new.workflow_end, NULL, NULL, :new.workflow_successful, NULL, NULL
|
|
||||||
);
|
|
||||||
END IF;
|
|
||||||
END IF;
|
|
||||||
END
|
|
||||||
;
|
|
||||||
/
|
|
||||||
@@ -1,2 +0,0 @@
|
|||||||
--DROP TRIGGER ct_mrds.a_workflow_history;
|
|
||||||
REVOKE SELECT, INSERT, UPDATE, DELETE ON ct_ods.a_load_history FROM ct_mrds;
|
|
||||||
@@ -1,33 +0,0 @@
|
|||||||
WHENEVER SQLERROR EXIT FAILURE
|
|
||||||
SET SERVEROUTPUT ON
|
|
||||||
SET TIMING ON
|
|
||||||
SET ECHO ON
|
|
||||||
SET HEADING OFF
|
|
||||||
SET FEEDBACK ON
|
|
||||||
SET VERIFY OFF
|
|
||||||
|
|
||||||
var filename VARCHAR2(100)
|
|
||||||
BEGIN
|
|
||||||
SELECT 'INSTALL_MARS_851_' || SUBSTR(PDB_NAME, (INSTR(PDB_NAME,'_',1)+1), (LENGTH(PDB_NAME)-INSTR(PDB_NAME,'_',1))) || '_' ||TO_CHAR(SYSDATE,'YYYYMMDD_HH24MISS')||'.log' INTO :filename from DBA_PDBS;
|
|
||||||
END;
|
|
||||||
/
|
|
||||||
column filename new_value _filename
|
|
||||||
select :filename filename from dual;
|
|
||||||
spool &_filename
|
|
||||||
|
|
||||||
prompt ##### started at time #####
|
|
||||||
select systimestamp from dual;
|
|
||||||
prompt ##### database name #####
|
|
||||||
SELECT SUBSTR(PDB_NAME, (INSTR(PDB_NAME,'_',1)+1), (LENGTH(PDB_NAME)-INSTR(PDB_NAME,'_',1))) AS PDB_NAME FROM DBA_PDBS;
|
|
||||||
|
|
||||||
|
|
||||||
@@01_MARS_851_install_CT_MRDS_A_LOAD_HISTORY_TRIGGER.sql
|
|
||||||
|
|
||||||
|
|
||||||
SET ECHO OFF
|
|
||||||
|
|
||||||
prompt ##### completed at time #####
|
|
||||||
select systimestamp from dual;
|
|
||||||
|
|
||||||
SPOOL OFF
|
|
||||||
EXIT
|
|
||||||
@@ -1,33 +0,0 @@
|
|||||||
WHENEVER SQLERROR EXIT FAILURE
|
|
||||||
SET SERVEROUTPUT ON
|
|
||||||
SET TIMING ON
|
|
||||||
SET ECHO ON
|
|
||||||
SET HEADING OFF
|
|
||||||
SET FEEDBACK ON
|
|
||||||
SET VERIFY OFF
|
|
||||||
|
|
||||||
var filename VARCHAR2(100)
|
|
||||||
BEGIN
|
|
||||||
SELECT 'ROLLBACK_MARS_851_' || SUBSTR(PDB_NAME, (INSTR(PDB_NAME,'_',1)+1), (LENGTH(PDB_NAME)-INSTR(PDB_NAME,'_',1))) || '_' ||TO_CHAR(SYSDATE,'YYYYMMDD_HH24MISS')||'.log' INTO :filename from DBA_PDBS;
|
|
||||||
END;
|
|
||||||
/
|
|
||||||
column filename new_value _filename
|
|
||||||
select :filename filename from dual;
|
|
||||||
spool &_filename
|
|
||||||
|
|
||||||
prompt ##### started at time #####
|
|
||||||
select systimestamp from dual;
|
|
||||||
prompt ##### database name #####
|
|
||||||
SELECT SUBSTR(PDB_NAME, (INSTR(PDB_NAME,'_',1)+1), (LENGTH(PDB_NAME)-INSTR(PDB_NAME,'_',1))) AS PDB_NAME FROM DBA_PDBS;
|
|
||||||
|
|
||||||
|
|
||||||
@@91_MARS_851_rollback_CT_MRDS_A_LOAD_HISTORY_TRIGGER.sql
|
|
||||||
|
|
||||||
|
|
||||||
SET ECHO OFF
|
|
||||||
|
|
||||||
prompt ##### completed at time #####
|
|
||||||
select systimestamp from dual;
|
|
||||||
|
|
||||||
SPOOL OFF
|
|
||||||
EXIT
|
|
||||||
5
MARS_Packages/REL02_POST/MARS-956/.gitignore
vendored
Normal file
5
MARS_Packages/REL02_POST/MARS-956/.gitignore
vendored
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
# Exclude temporary folders from version control
|
||||||
|
confluence/
|
||||||
|
log/
|
||||||
|
test/
|
||||||
|
mock_data/
|
||||||
@@ -0,0 +1,534 @@
|
|||||||
|
-- =====================================================================================
|
||||||
|
-- Script: 01_MARS_956_export_c2d_mpec_data.sql
|
||||||
|
-- Purpose: Export C2D MPEC historical data to ODS bucket
|
||||||
|
-- Author: Grzegorz Michalski
|
||||||
|
-- Created: 2026-02-12
|
||||||
|
-- MARS Issue: MARS-956
|
||||||
|
-- Target: mrds_data_dev/ODS/C2D/
|
||||||
|
-- =====================================================================================
|
||||||
|
|
||||||
|
SET SERVEROUTPUT ON SIZE UNLIMITED;
|
||||||
|
SET TIMING ON;
|
||||||
|
|
||||||
|
PROMPT =====================================================================================
|
||||||
|
PROMPT MARS-956: C2D MPEC Historical Data Export
|
||||||
|
PROMPT =====================================================================================
|
||||||
|
PROMPT Export Strategy:
|
||||||
|
PROMPT - Source: OU_LEGACY_C2D schema tables (operational database)
|
||||||
|
PROMPT - Target: ODS bucket as CSV files
|
||||||
|
PROMPT - Method: DATA_EXPORTER.EXPORT_TABLE_DATA
|
||||||
|
PROMPT - Registration: Files registered in A_SOURCE_FILE_RECEIVED
|
||||||
|
PROMPT - Path Structure: ODS/C2D/C2D_MPEC_*/
|
||||||
|
PROMPT =====================================================================================
|
||||||
|
|
||||||
|
-- Log export start
|
||||||
|
INSERT INTO CT_MRDS.A_PROCESS_LOG (PROCESS_NAME, PROCEDURE_NAME, LOG_LEVEL, LOG_MESSAGE, PROCEDURE_PARAMETERS)
|
||||||
|
VALUES ('MARS-956', 'EXPORT_C2D_MPEC_DATA', 'INFO', 'Starting historical C2D MPEC data export',
|
||||||
|
'Tables: MPEC_ADMIN, MPEC_CONTENT, MPEC_CONTENT_CRITERION');
|
||||||
|
|
||||||
|
PROMPT
|
||||||
|
PROMPT =====================================================================================
|
||||||
|
PROMPT PRE-EXPORT CHECK: Verify Existing Files in ODS Bucket
|
||||||
|
PROMPT =====================================================================================
|
||||||
|
|
||||||
|
-- Check 1: MPEC_ADMIN files
|
||||||
|
DECLARE
|
||||||
|
vFileCount NUMBER := 0;
|
||||||
|
vRecordCount NUMBER := 0;
|
||||||
|
vLocationUri VARCHAR2(1000);
|
||||||
|
BEGIN
|
||||||
|
-- Get bucket URI for DATA bucket
|
||||||
|
vLocationUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('DATA') || 'ODS/C2D/C2D_MPEC_ADMIN/';
|
||||||
|
|
||||||
|
-- Count existing files
|
||||||
|
SELECT COUNT(*)
|
||||||
|
INTO vFileCount
|
||||||
|
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
||||||
|
credential_name => 'OCI$RESOURCE_PRINCIPAL',
|
||||||
|
location_uri => vLocationUri
|
||||||
|
))
|
||||||
|
WHERE object_name NOT LIKE '%/'; -- Exclude directories
|
||||||
|
|
||||||
|
IF vFileCount > 0 THEN
|
||||||
|
DBMS_OUTPUT.PUT_LINE('===============================================================================');
|
||||||
|
DBMS_OUTPUT.PUT_LINE('PRE-EXPORT CHECK: MPEC_ADMIN files already exist in DATA bucket');
|
||||||
|
DBMS_OUTPUT.PUT_LINE('===============================================================================');
|
||||||
|
DBMS_OUTPUT.PUT_LINE('Location: ' || vLocationUri);
|
||||||
|
DBMS_OUTPUT.PUT_LINE('Files found: ' || vFileCount);
|
||||||
|
DBMS_OUTPUT.PUT_LINE('');
|
||||||
|
|
||||||
|
-- List existing files
|
||||||
|
DBMS_OUTPUT.PUT_LINE('Existing files:');
|
||||||
|
FOR rec IN (
|
||||||
|
SELECT object_name, bytes, TO_CHAR(last_modified, 'YYYY-MM-DD HH24:MI:SS') AS modified
|
||||||
|
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
||||||
|
credential_name => 'OCI$RESOURCE_PRINCIPAL',
|
||||||
|
location_uri => vLocationUri
|
||||||
|
))
|
||||||
|
WHERE object_name NOT LIKE '%/'
|
||||||
|
ORDER BY object_name
|
||||||
|
) LOOP
|
||||||
|
DBMS_OUTPUT.PUT_LINE(' - ' || rec.object_name || ' (' || rec.bytes || ' bytes, ' || rec.modified || ')');
|
||||||
|
END LOOP;
|
||||||
|
|
||||||
|
-- Count records in external table
|
||||||
|
BEGIN
|
||||||
|
EXECUTE IMMEDIATE 'SELECT COUNT(*) FROM ODS.C2D_MPEC_ADMIN_ODS' INTO vRecordCount;
|
||||||
|
DBMS_OUTPUT.PUT_LINE('');
|
||||||
|
DBMS_OUTPUT.PUT_LINE('-------------------------------------------------------------------------------');
|
||||||
|
DBMS_OUTPUT.PUT_LINE('>>>');
|
||||||
|
DBMS_OUTPUT.PUT_LINE('>>> Records currently readable via external table: ' || vRecordCount);
|
||||||
|
DBMS_OUTPUT.PUT_LINE('>>>');
|
||||||
|
DBMS_OUTPUT.PUT_LINE('-------------------------------------------------------------------------------');
|
||||||
|
EXCEPTION
|
||||||
|
WHEN OTHERS THEN
|
||||||
|
DBMS_OUTPUT.PUT_LINE('');
|
||||||
|
DBMS_OUTPUT.PUT_LINE('WARNING: Cannot count records in external table');
|
||||||
|
DBMS_OUTPUT.PUT_LINE('Error: ' || SQLERRM);
|
||||||
|
END;
|
||||||
|
|
||||||
|
DBMS_OUTPUT.PUT_LINE('===============================================================================');
|
||||||
|
DBMS_OUTPUT.PUT_LINE('');
|
||||||
|
ELSE
|
||||||
|
DBMS_OUTPUT.PUT_LINE('PRE-EXPORT CHECK: No existing MPEC_ADMIN files found - bucket is clean');
|
||||||
|
DBMS_OUTPUT.PUT_LINE('');
|
||||||
|
END IF;
|
||||||
|
END;
|
||||||
|
/
|
||||||
|
|
||||||
|
-- Check 2: MPEC_CONTENT files
|
||||||
|
DECLARE
|
||||||
|
vFileCount NUMBER := 0;
|
||||||
|
vRecordCount NUMBER := 0;
|
||||||
|
vLocationUri VARCHAR2(1000);
|
||||||
|
BEGIN
|
||||||
|
vLocationUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('DATA') || 'ODS/C2D/C2D_MPEC_CONTENT/';
|
||||||
|
|
||||||
|
SELECT COUNT(*)
|
||||||
|
INTO vFileCount
|
||||||
|
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
||||||
|
credential_name => 'OCI$RESOURCE_PRINCIPAL',
|
||||||
|
location_uri => vLocationUri
|
||||||
|
))
|
||||||
|
WHERE object_name NOT LIKE '%/';
|
||||||
|
|
||||||
|
IF vFileCount > 0 THEN
|
||||||
|
DBMS_OUTPUT.PUT_LINE('===============================================================================');
|
||||||
|
DBMS_OUTPUT.PUT_LINE('PRE-EXPORT CHECK: MPEC_CONTENT files already exist in DATA bucket');
|
||||||
|
DBMS_OUTPUT.PUT_LINE('===============================================================================');
|
||||||
|
DBMS_OUTPUT.PUT_LINE('Location: ' || vLocationUri);
|
||||||
|
DBMS_OUTPUT.PUT_LINE('Files found: ' || vFileCount);
|
||||||
|
DBMS_OUTPUT.PUT_LINE('');
|
||||||
|
|
||||||
|
DBMS_OUTPUT.PUT_LINE('Existing files:');
|
||||||
|
FOR rec IN (
|
||||||
|
SELECT object_name, bytes, TO_CHAR(last_modified, 'YYYY-MM-DD HH24:MI:SS') AS modified
|
||||||
|
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
||||||
|
credential_name => 'OCI$RESOURCE_PRINCIPAL',
|
||||||
|
location_uri => vLocationUri
|
||||||
|
))
|
||||||
|
WHERE object_name NOT LIKE '%/'
|
||||||
|
ORDER BY object_name
|
||||||
|
) LOOP
|
||||||
|
DBMS_OUTPUT.PUT_LINE(' - ' || rec.object_name || ' (' || rec.bytes || ' bytes, ' || rec.modified || ')');
|
||||||
|
END LOOP;
|
||||||
|
|
||||||
|
BEGIN
|
||||||
|
EXECUTE IMMEDIATE 'SELECT COUNT(*) FROM ODS.C2D_MPEC_CONTENT_ODS' INTO vRecordCount;
|
||||||
|
DBMS_OUTPUT.PUT_LINE('');
|
||||||
|
DBMS_OUTPUT.PUT_LINE('-------------------------------------------------------------------------------');
|
||||||
|
DBMS_OUTPUT.PUT_LINE('>>>');
|
||||||
|
DBMS_OUTPUT.PUT_LINE('>>> Records currently readable via external table: ' || vRecordCount);
|
||||||
|
DBMS_OUTPUT.PUT_LINE('>>>');
|
||||||
|
DBMS_OUTPUT.PUT_LINE('-------------------------------------------------------------------------------');
|
||||||
|
EXCEPTION
|
||||||
|
WHEN OTHERS THEN
|
||||||
|
DBMS_OUTPUT.PUT_LINE('');
|
||||||
|
DBMS_OUTPUT.PUT_LINE('WARNING: Cannot count records in external table');
|
||||||
|
DBMS_OUTPUT.PUT_LINE('Error: ' || SQLERRM);
|
||||||
|
END;
|
||||||
|
|
||||||
|
DBMS_OUTPUT.PUT_LINE('===============================================================================');
|
||||||
|
DBMS_OUTPUT.PUT_LINE('');
|
||||||
|
ELSE
|
||||||
|
DBMS_OUTPUT.PUT_LINE('PRE-EXPORT CHECK: No existing MPEC_CONTENT files found - bucket is clean');
|
||||||
|
DBMS_OUTPUT.PUT_LINE('');
|
||||||
|
END IF;
|
||||||
|
END;
|
||||||
|
/
|
||||||
|
|
||||||
|
-- Check 3: MPEC_CONTENT_CRITERION files
|
||||||
|
DECLARE
|
||||||
|
vFileCount NUMBER := 0;
|
||||||
|
vRecordCount NUMBER := 0;
|
||||||
|
vLocationUri VARCHAR2(1000);
|
||||||
|
BEGIN
|
||||||
|
vLocationUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('DATA') || 'ODS/C2D/C2D_MPEC_CONTENT_CRITERION/';
|
||||||
|
|
||||||
|
SELECT COUNT(*)
|
||||||
|
INTO vFileCount
|
||||||
|
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
||||||
|
credential_name => 'OCI$RESOURCE_PRINCIPAL',
|
||||||
|
location_uri => vLocationUri
|
||||||
|
))
|
||||||
|
WHERE object_name NOT LIKE '%/';
|
||||||
|
|
||||||
|
IF vFileCount > 0 THEN
|
||||||
|
DBMS_OUTPUT.PUT_LINE('===============================================================================');
|
||||||
|
DBMS_OUTPUT.PUT_LINE('PRE-EXPORT CHECK: MPEC_CONTENT_CRITERION files already exist in DATA bucket');
|
||||||
|
DBMS_OUTPUT.PUT_LINE('===============================================================================');
|
||||||
|
DBMS_OUTPUT.PUT_LINE('Location: ' || vLocationUri);
|
||||||
|
DBMS_OUTPUT.PUT_LINE('Files found: ' || vFileCount);
|
||||||
|
DBMS_OUTPUT.PUT_LINE('');
|
||||||
|
|
||||||
|
DBMS_OUTPUT.PUT_LINE('Existing files:');
|
||||||
|
FOR rec IN (
|
||||||
|
SELECT object_name, bytes, TO_CHAR(last_modified, 'YYYY-MM-DD HH24:MI:SS') AS modified
|
||||||
|
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
||||||
|
credential_name => 'OCI$RESOURCE_PRINCIPAL',
|
||||||
|
location_uri => vLocationUri
|
||||||
|
))
|
||||||
|
WHERE object_name NOT LIKE '%/'
|
||||||
|
ORDER BY object_name
|
||||||
|
) LOOP
|
||||||
|
DBMS_OUTPUT.PUT_LINE(' - ' || rec.object_name || ' (' || rec.bytes || ' bytes, ' || rec.modified || ')');
|
||||||
|
END LOOP;
|
||||||
|
|
||||||
|
BEGIN
|
||||||
|
EXECUTE IMMEDIATE 'SELECT COUNT(*) FROM ODS.C2D_MPEC_CONTENT_CRITERION_ODS' INTO vRecordCount;
|
||||||
|
DBMS_OUTPUT.PUT_LINE('');
|
||||||
|
DBMS_OUTPUT.PUT_LINE('-------------------------------------------------------------------------------');
|
||||||
|
DBMS_OUTPUT.PUT_LINE('>>>');
|
||||||
|
DBMS_OUTPUT.PUT_LINE('>>> Records currently readable via external table: ' || vRecordCount);
|
||||||
|
DBMS_OUTPUT.PUT_LINE('>>>');
|
||||||
|
DBMS_OUTPUT.PUT_LINE('-------------------------------------------------------------------------------');
|
||||||
|
EXCEPTION
|
||||||
|
WHEN OTHERS THEN
|
||||||
|
DBMS_OUTPUT.PUT_LINE('');
|
||||||
|
DBMS_OUTPUT.PUT_LINE('WARNING: Cannot count records in external table');
|
||||||
|
DBMS_OUTPUT.PUT_LINE('Error: ' || SQLERRM);
|
||||||
|
END;
|
||||||
|
|
||||||
|
DBMS_OUTPUT.PUT_LINE('===============================================================================');
|
||||||
|
DBMS_OUTPUT.PUT_LINE('');
|
||||||
|
ELSE
|
||||||
|
DBMS_OUTPUT.PUT_LINE('PRE-EXPORT CHECK: No existing MPEC_CONTENT_CRITERION files found - bucket is clean');
|
||||||
|
DBMS_OUTPUT.PUT_LINE('');
|
||||||
|
END IF;
|
||||||
|
END;
|
||||||
|
/
|
||||||
|
|
||||||
|
PROMPT
|
||||||
|
PROMPT =====================================================================================
|
||||||
|
PROMPT PRE-EXPORT: Verify Source and Target Table Readiness
|
||||||
|
PROMPT =====================================================================================
|
||||||
|
|
||||||
|
-- Check source table counts before export
|
||||||
|
DECLARE
    -- Source (OU_LEGACY_C2D) row counts
    vAdminRows       NUMBER := 0;
    vContentRows     NUMBER := 0;
    vCriterionRows   NUMBER := 0;
    vTotalSource     NUMBER := 0;
    -- Target (ODS external table) row counts; -1 signals an access error
    vAdminTarget     NUMBER := 0;
    vContentTarget   NUMBER := 0;
    vCriterionTarget NUMBER := 0;
    vTotalTarget     NUMBER := 0;

    -- Count rows in one external table, tolerating the "no data files yet"
    -- condition: ORA-29913/ORA-29400 access-driver errors and KUP-13023 map
    -- to 0 (an empty bucket folder is the expected pre-export state); any
    -- other error returns -1 so the caller reports the table as inaccessible.
    -- pTableName is only ever one of the hard-coded constants below.
    FUNCTION countExternal(pTableName IN VARCHAR2) RETURN NUMBER IS
        vCount NUMBER;
    BEGIN
        EXECUTE IMMEDIATE 'SELECT COUNT(*) FROM ' || pTableName INTO vCount;
        RETURN vCount;
    EXCEPTION
        WHEN OTHERS THEN
            IF SQLCODE IN (-29913, -29400) OR SQLERRM LIKE '%KUP-13023%' THEN
                RETURN 0;  -- Empty is expected
            ELSE
                RETURN -1; -- Error
            END IF;
    END countExternal;
BEGIN
    -- Source table counts
    EXECUTE IMMEDIATE 'SELECT COUNT(*) FROM OU_LEGACY_C2D.MPEC_ADMIN' INTO vAdminRows;
    EXECUTE IMMEDIATE 'SELECT COUNT(*) FROM OU_LEGACY_C2D.MPEC_CONTENT' INTO vContentRows;
    EXECUTE IMMEDIATE 'SELECT COUNT(*) FROM OU_LEGACY_C2D.MPEC_CONTENT_CRITERION' INTO vCriterionRows;
    vTotalSource := vAdminRows + vContentRows + vCriterionRows;

    DBMS_OUTPUT.PUT_LINE('Source table record counts (pre-export):');
    DBMS_OUTPUT.PUT_LINE('- MPEC_ADMIN: ' || vAdminRows || ' records');
    DBMS_OUTPUT.PUT_LINE('- MPEC_CONTENT: ' || vContentRows || ' records');
    DBMS_OUTPUT.PUT_LINE('- MPEC_CONTENT_CRITERION: ' || vCriterionRows || ' records');
    DBMS_OUTPUT.PUT_LINE('- TOTAL SOURCE: ' || vTotalSource || ' records');

    -- Target external table counts (current state)
    vAdminTarget     := countExternal('ODS.C2D_MPEC_ADMIN_ODS');
    vContentTarget   := countExternal('ODS.C2D_MPEC_CONTENT_ODS');
    vCriterionTarget := countExternal('ODS.C2D_MPEC_CONTENT_CRITERION_ODS');

    IF vAdminTarget >= 0 AND vContentTarget >= 0 AND vCriterionTarget >= 0 THEN
        vTotalTarget := vAdminTarget + vContentTarget + vCriterionTarget;
    ELSE
        vTotalTarget := -1; -- Error state
    END IF;

    DBMS_OUTPUT.PUT_LINE('');
    DBMS_OUTPUT.PUT_LINE('Target external table record counts (pre-export):');
    DBMS_OUTPUT.PUT_LINE('- C2D_MPEC_ADMIN_ODS: ' ||
        CASE WHEN vAdminTarget = -1 THEN 'ERROR/INACCESSIBLE' ELSE TO_CHAR(vAdminTarget) END);
    DBMS_OUTPUT.PUT_LINE('- C2D_MPEC_CONTENT_ODS: ' ||
        CASE WHEN vContentTarget = -1 THEN 'ERROR/INACCESSIBLE' ELSE TO_CHAR(vContentTarget) END);
    DBMS_OUTPUT.PUT_LINE('- C2D_MPEC_CONTENT_CRITERION_ODS: ' ||
        CASE WHEN vCriterionTarget = -1 THEN 'ERROR/INACCESSIBLE' ELSE TO_CHAR(vCriterionTarget) END);
    DBMS_OUTPUT.PUT_LINE('- TOTAL TARGET: ' ||
        CASE WHEN vTotalTarget = -1 THEN 'ERROR/INACCESSIBLE' ELSE TO_CHAR(vTotalTarget) END);

    IF vTotalSource > 0 THEN
        DBMS_OUTPUT.PUT_LINE('SUCCESS: Source tables contain data - ready for export');
    ELSE
        DBMS_OUTPUT.PUT_LINE('ERROR: WARNING: No source data found');
    END IF;

    IF vTotalTarget = 0 THEN
        DBMS_OUTPUT.PUT_LINE('SUCCESS: Target external tables are clean - ready for fresh export');
    ELSIF vTotalTarget > 0 THEN
        DBMS_OUTPUT.PUT_LINE('WARNING: Target tables contain ' || vTotalTarget || ' records - may be re-run');
    ELSE
        DBMS_OUTPUT.PUT_LINE('ERROR: Cannot access target external tables');
    END IF;

    DBMS_OUTPUT.PUT_LINE('');
    DBMS_OUTPUT.PUT_LINE('Proceeding with export...');
END;
/
|
||||||
|
|
||||||
|
PROMPT
|
||||||
|
PROMPT =====================================================================================
|
||||||
|
PROMPT TABLE 1/3: OU_LEGACY_C2D.MPEC_ADMIN -> ODS/C2D/C2D_MPEC_ADMIN
|
||||||
|
PROMPT =====================================================================================
|
||||||
|
|
||||||
|
-- Export OU_LEGACY_C2D.MPEC_ADMIN to the ODS bucket; failures are reported
-- to the console and persisted to A_PROCESS_LOG instead of aborting the run.
DECLARE
    vErrorMsg VARCHAR2(4000);
BEGIN
    CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA(
        pSchemaName        => 'OU_LEGACY_C2D',
        pTableName         => 'MPEC_ADMIN',
        pKeyColumnName     => 'A_ETL_LOAD_SET_FK',              -- ETL key for data lookup
        pBucketArea        => 'ODS',
        pFolderName        => 'ODS/C2D/C2D_MPEC_ADMIN',
        pTemplateTableName => 'CT_ET_TEMPLATES.C2D_MPEC_ADMIN', -- Template for column order
        pMaxFileSize       => 104857600,                        -- 100MB max file size
        pRegisterExport    => TRUE,                             -- Register files in A_SOURCE_FILE_RECEIVED
        pProcessName       => 'MARS-956'                        -- Process identifier for tracking
    );

    DBMS_OUTPUT.PUT_LINE('SUCCESS: MPEC_ADMIN export completed successfully');
EXCEPTION
    WHEN OTHERS THEN
        vErrorMsg := SUBSTR(SQLERRM, 1, 4000);
        DBMS_OUTPUT.PUT_LINE('ERROR: MPEC_ADMIN export failed: ' || vErrorMsg);
        -- Log error using proper ENV_MANAGER pattern
        INSERT INTO CT_MRDS.A_PROCESS_LOG
            (guid, Username, Osuser, Machine, Module, process_name, procedure_name, procedure_parameters, log_level, log_message)
        VALUES
            ('MARS-956', USER, SYS_CONTEXT('USERENV','OS_USER'), SYS_CONTEXT('USERENV','HOST'),
             'MARS-956', 'MARS-956', 'EXPORT_MPEC_ADMIN', NULL, 'ERROR',
             'Export failed: ' || vErrorMsg);
        COMMIT;
END;
/
|
||||||
|
|
||||||
|
PROMPT
|
||||||
|
PROMPT =====================================================================================
|
||||||
|
PROMPT TABLE 2/3: OU_LEGACY_C2D.MPEC_CONTENT -> ODS/C2D/C2D_MPEC_CONTENT
|
||||||
|
PROMPT =====================================================================================
|
||||||
|
|
||||||
|
-- Export OU_LEGACY_C2D.MPEC_CONTENT to the ODS bucket; failures are reported
-- to the console and persisted to A_PROCESS_LOG instead of aborting the run.
DECLARE
    vErrorMsg VARCHAR2(4000);
BEGIN
    CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA(
        pSchemaName        => 'OU_LEGACY_C2D',
        pTableName         => 'MPEC_CONTENT',
        pKeyColumnName     => 'A_ETL_LOAD_SET_FK',
        pBucketArea        => 'ODS',
        pFolderName        => 'ODS/C2D/C2D_MPEC_CONTENT',
        pTemplateTableName => 'CT_ET_TEMPLATES.C2D_MPEC_CONTENT',
        pMaxFileSize       => 104857600,  -- 100MB max file size
        pRegisterExport    => TRUE,
        pProcessName       => 'MARS-956'  -- Process identifier for tracking
    );

    DBMS_OUTPUT.PUT_LINE('SUCCESS: MPEC_CONTENT export completed successfully');
EXCEPTION
    WHEN OTHERS THEN
        vErrorMsg := SUBSTR(SQLERRM, 1, 4000);
        DBMS_OUTPUT.PUT_LINE('ERROR: MPEC_CONTENT export failed: ' || vErrorMsg);
        -- Log error using proper ENV_MANAGER pattern
        INSERT INTO CT_MRDS.A_PROCESS_LOG
            (guid, Username, Osuser, Machine, Module, process_name, procedure_name, procedure_parameters, log_level, log_message)
        VALUES
            ('MARS-956', USER, SYS_CONTEXT('USERENV','OS_USER'), SYS_CONTEXT('USERENV','HOST'),
             'MARS-956', 'MARS-956', 'EXPORT_MPEC_CONTENT', NULL, 'ERROR',
             'Export failed: ' || vErrorMsg);
        COMMIT;
END;
/
|
||||||
|
|
||||||
|
PROMPT
|
||||||
|
PROMPT =====================================================================================
|
||||||
|
PROMPT TABLE 3/3: OU_LEGACY_C2D.MPEC_CONTENT_CRITERION -> ODS/C2D/C2D_MPEC_CONTENT_CRITERION
|
||||||
|
PROMPT =====================================================================================
|
||||||
|
|
||||||
|
-- Export OU_LEGACY_C2D.MPEC_CONTENT_CRITERION to the ODS bucket; failures are
-- reported to the console and persisted to A_PROCESS_LOG instead of aborting.
DECLARE
    vErrorMsg VARCHAR2(4000);
BEGIN
    CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA(
        pSchemaName        => 'OU_LEGACY_C2D',
        pTableName         => 'MPEC_CONTENT_CRITERION',
        pKeyColumnName     => 'A_ETL_LOAD_SET_FK',
        pBucketArea        => 'ODS',
        pFolderName        => 'ODS/C2D/C2D_MPEC_CONTENT_CRITERION',
        pTemplateTableName => 'CT_ET_TEMPLATES.C2D_MPEC_CONTENT_CRITERION',
        pMaxFileSize       => 104857600,  -- 100MB max file size
        pRegisterExport    => TRUE,
        pProcessName       => 'MARS-956'  -- Process identifier for tracking
    );

    DBMS_OUTPUT.PUT_LINE('SUCCESS: MPEC_CONTENT_CRITERION export completed successfully');
EXCEPTION
    WHEN OTHERS THEN
        vErrorMsg := SUBSTR(SQLERRM, 1, 4000);
        DBMS_OUTPUT.PUT_LINE('ERROR: MPEC_CONTENT_CRITERION export failed: ' || vErrorMsg);
        -- Log error using proper ENV_MANAGER pattern
        INSERT INTO CT_MRDS.A_PROCESS_LOG
            (guid, Username, Osuser, Machine, Module, process_name, procedure_name, procedure_parameters, log_level, log_message)
        VALUES
            ('MARS-956', USER, SYS_CONTEXT('USERENV','OS_USER'), SYS_CONTEXT('USERENV','HOST'),
             'MARS-956', 'MARS-956', 'EXPORT_MPEC_CONTENT_CRITERION', NULL, 'ERROR',
             'Export failed: ' || vErrorMsg);
        COMMIT;
END;
/
|
||||||
|
|
||||||
|
PROMPT
|
||||||
|
PROMPT =====================================================================================
|
||||||
|
PROMPT Export Summary - Checking Results
|
||||||
|
PROMPT =====================================================================================
|
||||||
|
|
||||||
|
-- Log completion
-- NOTE(review): this row is inserted unconditionally; the per-table export
-- blocks above catch their own errors, so "completed successfully" can be
-- logged even when an individual export failed -- cross-check against any
-- ERROR rows written by those handlers before trusting this entry.
INSERT INTO CT_MRDS.A_PROCESS_LOG (PROCESS_NAME, PROCEDURE_NAME, LOG_LEVEL, LOG_MESSAGE)
VALUES ('MARS-956', 'EXPORT_C2D_MPEC_DATA', 'INFO', 'All C2D MPEC historical exports completed successfully');
|
||||||
|
|
||||||
|
PROMPT
|
||||||
|
PROMPT =====================================================================================
|
||||||
|
PROMPT MARS-956 C2D MPEC Export Completed Successfully!
|
||||||
|
PROMPT =====================================================================================
|
||||||
|
PROMPT POST-EXPORT: Source vs Target Record Count Comparison
|
||||||
|
PROMPT =====================================================================================
|
||||||
|
|
||||||
|
-- Verify record counts after export
|
||||||
|
DECLARE
    -- Source (OU_LEGACY_C2D) row counts
    vAdminSource     NUMBER := 0;
    vContentSource   NUMBER := 0;
    vCriterionSource NUMBER := 0;
    vTotalSource     NUMBER := 0;
    -- Target (ODS external table) row counts
    vAdminTarget     NUMBER := 0;
    vContentTarget   NUMBER := 0;
    vCriterionTarget NUMBER := 0;
    vTotalTarget     NUMBER := 0;
    vMismatchCount   NUMBER := 0;

    -- Print one source-vs-target comparison row; tally a mismatch unless the
    -- caller opts out (the TOTAL row must not double-count mismatches).
    -- NOTE(review): labels are emitted as-is; if the original script relied on
    -- space-padded labels for column alignment, pad pLabel accordingly.
    PROCEDURE printRow(pLabel          IN VARCHAR2,
                       pSource         IN NUMBER,
                       pTarget         IN NUMBER,
                       pCountMismatch  IN BOOLEAN DEFAULT TRUE) IS
    BEGIN
        DBMS_OUTPUT.PUT_LINE(pLabel || ' | ' ||
            RPAD(pSource, 8) || ' | ' ||
            RPAD(pTarget, 8) || ' | ' ||
            CASE WHEN pSource = pTarget THEN 'OK' ELSE 'MISMATCH' END);
        IF pCountMismatch AND pSource != pTarget THEN
            vMismatchCount := vMismatchCount + 1;
        END IF;
    END printRow;
BEGIN
    -- Source table counts
    EXECUTE IMMEDIATE 'SELECT COUNT(*) FROM OU_LEGACY_C2D.MPEC_ADMIN' INTO vAdminSource;
    EXECUTE IMMEDIATE 'SELECT COUNT(*) FROM OU_LEGACY_C2D.MPEC_CONTENT' INTO vContentSource;
    EXECUTE IMMEDIATE 'SELECT COUNT(*) FROM OU_LEGACY_C2D.MPEC_CONTENT_CRITERION' INTO vCriterionSource;
    vTotalSource := vAdminSource + vContentSource + vCriterionSource;

    -- Target external table counts; any failure falls through to the
    -- EXCEPTION handler below, which reports the tables as unverifiable.
    BEGIN
        EXECUTE IMMEDIATE 'SELECT COUNT(*) FROM ODS.C2D_MPEC_ADMIN_ODS' INTO vAdminTarget;
        EXECUTE IMMEDIATE 'SELECT COUNT(*) FROM ODS.C2D_MPEC_CONTENT_ODS' INTO vContentTarget;
        EXECUTE IMMEDIATE 'SELECT COUNT(*) FROM ODS.C2D_MPEC_CONTENT_CRITERION_ODS' INTO vCriterionTarget;
        vTotalTarget := vAdminTarget + vContentTarget + vCriterionTarget;

        DBMS_OUTPUT.PUT_LINE('POST-EXPORT VERIFICATION SUMMARY');
        DBMS_OUTPUT.PUT_LINE('=====================================');
        DBMS_OUTPUT.PUT_LINE('Table | Source | Target | Match');
        DBMS_OUTPUT.PUT_LINE('-----------------------------------------------------------');

        printRow('MPEC_ADMIN', vAdminSource, vAdminTarget);
        printRow('MPEC_CONTENT', vContentSource, vContentTarget);
        printRow('MPEC_CONTENT_CRITERION', vCriterionSource, vCriterionTarget);

        DBMS_OUTPUT.PUT_LINE('-----------------------------------------------------------');
        printRow('TOTAL', vTotalSource, vTotalTarget, pCountMismatch => FALSE);

        DBMS_OUTPUT.PUT_LINE('');
        IF vMismatchCount = 0 THEN
            DBMS_OUTPUT.PUT_LINE('SUCCESS: All record counts match - export verified');
        ELSE
            DBMS_OUTPUT.PUT_LINE('WARNING: ' || vMismatchCount || ' table(s) have record count mismatches');
            DBMS_OUTPUT.PUT_LINE(' Please review export logs and external table access permissions');
        END IF;
    EXCEPTION
        WHEN OTHERS THEN
            DBMS_OUTPUT.PUT_LINE('ERROR: Cannot verify target external tables post-export');
            DBMS_OUTPUT.PUT_LINE('Error: ' || SQLERRM);
            DBMS_OUTPUT.PUT_LINE('Please check external table configuration and ODS bucket access');
    END;
END;
/
|
||||||
|
|
||||||
|
-- Log export completion
-- NOTE(review): this completion row is written even when individual exports
-- failed, because each export block catches and logs its own errors above.
INSERT INTO CT_MRDS.A_PROCESS_LOG (PROCESS_NAME, PROCEDURE_NAME, LOG_LEVEL, LOG_MESSAGE, PROCEDURE_PARAMETERS)
VALUES ('MARS-956', 'EXPORT_C2D_MPEC_DATA', 'INFO', 'Historical C2D MPEC data export completed',
'Check verification scripts for detailed results');

-- Persist the completion log row
COMMIT;
|
||||||
|
|
||||||
|
PROMPT
|
||||||
|
PROMPT =====================================================================================
|
||||||
|
PROMPT MARS-956 C2D MPEC Historical Data Export - COMPLETED
|
||||||
|
PROMPT
|
||||||
|
PROMPT Next steps:
|
||||||
|
PROMPT 1. Run: @02_MARS_956_verify_exports.sql (verify file registration)
|
||||||
|
PROMPT 2. Run: @03_MARS_956_verify_data_integrity.sql (full data verification)
|
||||||
|
PROMPT =====================================================================================
|
||||||
190
MARS_Packages/REL02_POST/MARS-956/02_MARS_956_verify_exports.sql
Normal file
190
MARS_Packages/REL02_POST/MARS-956/02_MARS_956_verify_exports.sql
Normal file
@@ -0,0 +1,190 @@
|
|||||||
|
-- ===================================================================
|
||||||
|
-- MARS-956 Verify Exports: Check Export Results and File Creation
|
||||||
|
-- ===================================================================
|
||||||
|
-- Purpose: Verify that C2D MPEC export completed successfully
|
||||||
|
-- Author: Grzegorz Michalski
|
||||||
|
-- Date: 2026-02-12
|
||||||
|
|
||||||
|
SET SERVEROUTPUT ON SIZE UNLIMITED
|
||||||
|
SET TIMING ON
|
||||||
|
|
||||||
|
PROMPT =========================================================================
|
||||||
|
PROMPT MARS-956 Export Verification
|
||||||
|
PROMPT =========================================================================
|
||||||
|
|
||||||
|
-- Check 1: Verify files were registered in A_SOURCE_FILE_RECEIVED
|
||||||
|
PROMPT Checking export file registration...
|
||||||
|
-- Verify that the expected export files were registered in the last hour.
DECLARE
    vFileCount  NUMBER := 0;  -- registered export files
    vTotalBytes NUMBER := 0;  -- combined size of those files, in bytes
BEGIN
    -- MARS-956 export files are named '<etl_key>_...' with ETL keys 2001-2010.
    -- REGEXP_LIKE anchors the key and matches a literal underscore; the
    -- previous LIKE '2001_%' list treated '_' as a single-character wildcard
    -- (matching e.g. '20019...'), so keys could be over-matched.
    SELECT COUNT(*), NVL(SUM(BYTES), 0)
      INTO vFileCount, vTotalBytes
      FROM CT_MRDS.A_SOURCE_FILE_RECEIVED
     WHERE RECEPTION_DATE >= SYSDATE - 1/24 -- Last hour
       AND REGEXP_LIKE(SOURCE_FILE_NAME, '^20(0[1-9]|10)_');

    DBMS_OUTPUT.PUT_LINE('SUCCESS: Registered export files: ' || vFileCount);
    DBMS_OUTPUT.PUT_LINE('SUCCESS: Total file size: ' || ROUND(vTotalBytes/1024, 2) || ' KB');

    IF vFileCount = 0 THEN
        DBMS_OUTPUT.PUT_LINE('WARNING: No export files found in registration');
    ELSIF vFileCount < 9 THEN
        DBMS_OUTPUT.PUT_LINE('WARNING: Expected 9 files (3 tables x 3 ETL keys), found: ' || vFileCount);
    ELSE
        DBMS_OUTPUT.PUT_LINE('SUCCESS: All expected export files found');
    END IF;
END;
/
|
||||||
|
|
||||||
|
-- Check 2: Show recent export registrations
-- Lists export files registered in the last hour, newest first.
-- NOTE(review): LIKE '200%' also matches file names starting with 2000 or
-- 2011+; confirm whether the intended ETL key range is exactly 2001-2010.
PROMPT Recent export file registrations:
SELECT
SUBSTR(SOURCE_FILE_NAME, 1, 40) AS FILE_NAME,
A_SOURCE_FILE_CONFIG_KEY AS CONFIG_KEY,
PROCESSING_STATUS,
ROUND(BYTES/1024, 2) AS SIZE_KB,
TO_CHAR(RECEPTION_DATE, 'HH24:MI:SS') AS TIME_EXPORTED
FROM CT_MRDS.A_SOURCE_FILE_RECEIVED
WHERE RECEPTION_DATE >= SYSDATE - 1/24 -- Last hour
AND (SOURCE_FILE_NAME LIKE '200%') -- ETL keys starting with 200
ORDER BY RECEPTION_DATE DESC;
|
||||||
|
|
||||||
|
-- Check 3: Verify export process logs
|
||||||
|
PROMPT Checking export process logs...
|
||||||
|
-- Count MARS-956 process-log rows from the last hour and flag any errors.
DECLARE
    vLogCount   NUMBER := 0;  -- total log rows in the window
    vErrorCount NUMBER := 0;  -- rows with LOG_LEVEL = 'ERROR'
BEGIN
    -- COUNT(CASE ...) ignores NULLs and returns 0 over zero rows, unlike the
    -- previous SUM(CASE ...) which yields NULL when no rows match the WHERE
    -- clause -- a NULL vErrorCount made 'Error entries: ' print blank and the
    -- vErrorCount > 0 test silently take the success branch.
    SELECT COUNT(*), COUNT(CASE WHEN LOG_LEVEL = 'ERROR' THEN 1 END)
      INTO vLogCount, vErrorCount
      FROM CT_MRDS.A_PROCESS_LOG
     WHERE PROCESS_NAME = 'MARS-956'
       AND LOG_TIMESTAMP >= SYSTIMESTAMP - INTERVAL '1' HOUR;

    DBMS_OUTPUT.PUT_LINE('SUCCESS: Process log entries: ' || vLogCount);
    DBMS_OUTPUT.PUT_LINE('SUCCESS: Error entries: ' || vErrorCount);

    IF vErrorCount > 0 THEN
        DBMS_OUTPUT.PUT_LINE('WARNING: ' || vErrorCount || ' errors found in process log');
    ELSE
        DBMS_OUTPUT.PUT_LINE('SUCCESS: No errors found in process log');
    END IF;
END;
/
|
||||||
|
|
||||||
|
-- Check 4: Display recent process logs
-- Shows the ten most recent MARS-956 log rows from the last hour, newest first.
PROMPT Recent MARS-956 process logs:
SELECT
TO_CHAR(LOG_TIMESTAMP, 'HH24:MI:SS') AS TIME,
PROCEDURE_NAME,
LOG_LEVEL,
SUBSTR(LOG_MESSAGE, 1, 60) AS MESSAGE
FROM CT_MRDS.A_PROCESS_LOG
WHERE PROCESS_NAME = 'MARS-956'
AND LOG_TIMESTAMP >= SYSTIMESTAMP - INTERVAL '1' HOUR
ORDER BY LOG_TIMESTAMP DESC
FETCH FIRST 10 ROWS ONLY;
|
||||||
|
|
||||||
|
-- Check 5: Cloud bucket file verification (if cloud_wrapper available)
|
||||||
|
PROMPT Checking cloud bucket files...
|
||||||
|
-- Count the exported C2D_MPEC_* objects in the ODS cloud bucket, echoing the
-- first few object names; bucket-access failures are reported as warnings.
DECLARE
    vObjectsSeen  NUMBER := 0;                 -- matching objects found so far
    vCredName     VARCHAR2(100);               -- credential from ENV_MANAGER
    vBucketUri    VARCHAR2(500);               -- ODS bucket root URI
    cDisplayLimit CONSTANT PLS_INTEGER := 5;   -- echo at most this many names
BEGIN
    -- Resolve bucket location and credential from project configuration
    vBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ODS');
    vCredName  := CT_MRDS.ENV_MANAGER.gvCredentialName;

    DBMS_OUTPUT.PUT_LINE('Checking ODS bucket: ' || vBucketUri);

    BEGIN
        FOR rec IN (
            SELECT object_name
              FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
                       credential_name => vCredName,
                       location_uri    => vBucketUri
                   ))
             WHERE object_name LIKE 'ODS/C2D/C2D_MPEC_%'
        ) LOOP
            vObjectsSeen := vObjectsSeen + 1;
            IF vObjectsSeen <= cDisplayLimit THEN
                DBMS_OUTPUT.PUT_LINE('- ' || rec.object_name);
            END IF;
        END LOOP;

        DBMS_OUTPUT.PUT_LINE('SUCCESS: Cloud bucket files found: ' || vObjectsSeen);

        IF vObjectsSeen = 0 THEN
            DBMS_OUTPUT.PUT_LINE('WARNING: No files found in cloud bucket');
        END IF;
    EXCEPTION
        WHEN OTHERS THEN
            DBMS_OUTPUT.PUT_LINE('WARNING: Cannot access cloud bucket: ' || SQLERRM);
    END;
END;
/
|
||||||
|
|
||||||
|
PROMPT
|
||||||
|
PROMPT =========================================================================
|
||||||
|
PROMPT MARS-956 Export Verification Summary
|
||||||
|
PROMPT =========================================================================
|
||||||
|
|
||||||
|
-- Final verification summary
|
||||||
|
-- Roll up the verification checks into a single overall status line.
DECLARE
    vFileRegCount  NUMBER := 0;   -- export files registered in the last hour
    vLogErrorCount NUMBER := 0;   -- ERROR-level log rows in the last hour
    vOverallStatus VARCHAR2(20);
    -- (removed unused vCloudFileCount; cloud-bucket counting is Check 5's job)
BEGIN
    -- Count registered files
    SELECT COUNT(*)
      INTO vFileRegCount
      FROM CT_MRDS.A_SOURCE_FILE_RECEIVED
     WHERE RECEPTION_DATE >= SYSDATE - 1/24
       AND SOURCE_FILE_NAME LIKE '200%';

    -- Count process errors
    SELECT COUNT(*)
      INTO vLogErrorCount
      FROM CT_MRDS.A_PROCESS_LOG
     WHERE PROCESS_NAME = 'MARS-956'
       AND LOG_LEVEL = 'ERROR'
       AND LOG_TIMESTAMP >= SYSTIMESTAMP - INTERVAL '1' HOUR;

    -- Determine overall status: 9 files (3 tables x 3 ETL keys) and no
    -- errors is full success; some files and no errors is partial.
    IF vFileRegCount >= 9 AND vLogErrorCount = 0 THEN
        vOverallStatus := 'SUCCESS';
    ELSIF vFileRegCount > 0 AND vLogErrorCount = 0 THEN
        vOverallStatus := 'PARTIAL SUCCESS';
    ELSE
        vOverallStatus := 'ISSUES DETECTED';
    END IF;

    DBMS_OUTPUT.PUT_LINE('MARS-956 Export Verification: ' || vOverallStatus);
    DBMS_OUTPUT.PUT_LINE('- Registered files: ' || vFileRegCount || ' (expected: 9)');
    DBMS_OUTPUT.PUT_LINE('- Process errors: ' || vLogErrorCount);

    IF vOverallStatus = 'SUCCESS' THEN
        DBMS_OUTPUT.PUT_LINE('SUCCESS: All validations passed - export successful');
    ELSE
        DBMS_OUTPUT.PUT_LINE('WARNING: Some issues detected - review logs');
    END IF;
END;
/
|
||||||
|
|
||||||
|
PROMPT =========================================================================
|
||||||
|
PROMPT Export Verification Completed
|
||||||
|
PROMPT =========================================================================
|
||||||
@@ -0,0 +1,354 @@
|
|||||||
|
-- ===================================================================
|
||||||
|
-- MARS-956 Verify Data Integrity: Source vs Exported Data Validation
|
||||||
|
-- ===================================================================
|
||||||
|
-- Purpose: Verify data integrity between source tables and exported files
|
||||||
|
-- Author: Grzegorz Michalski
|
||||||
|
-- Date: 2026-02-12
|
||||||
|
|
||||||
|
SET SERVEROUTPUT ON SIZE UNLIMITED
|
||||||
|
SET TIMING ON
|
||||||
|
|
||||||
|
PROMPT =========================================================================
|
||||||
|
PROMPT MARS-956 Data Integrity Verification
|
||||||
|
PROMPT =========================================================================
|
||||||
|
|
||||||
|
-- Check 1: Source table record counts vs expected ETL keys
|
||||||
|
PROMPT Checking source table record counts...
|
||||||
|
-- Report source-table row counts and confirm there is data to verify.
DECLARE
    vAdminRows     NUMBER := 0;
    vContentRows   NUMBER := 0;
    vCriterionRows NUMBER := 0;
    vTotalRows     NUMBER := 0;
    -- (removed unused vExpectedFiles; expected-file-count checks live in
    --  02_MARS_956_verify_exports.sql)
BEGIN
    EXECUTE IMMEDIATE 'SELECT COUNT(*) FROM OU_LEGACY_C2D.MPEC_ADMIN' INTO vAdminRows;
    EXECUTE IMMEDIATE 'SELECT COUNT(*) FROM OU_LEGACY_C2D.MPEC_CONTENT' INTO vContentRows;
    EXECUTE IMMEDIATE 'SELECT COUNT(*) FROM OU_LEGACY_C2D.MPEC_CONTENT_CRITERION' INTO vCriterionRows;

    vTotalRows := vAdminRows + vContentRows + vCriterionRows;

    DBMS_OUTPUT.PUT_LINE('Source table record counts:');
    DBMS_OUTPUT.PUT_LINE('- MPEC_ADMIN: ' || vAdminRows || ' records');
    DBMS_OUTPUT.PUT_LINE('- MPEC_CONTENT: ' || vContentRows || ' records');
    DBMS_OUTPUT.PUT_LINE('- MPEC_CONTENT_CRITERION: ' || vCriterionRows || ' records');
    DBMS_OUTPUT.PUT_LINE('- TOTAL: ' || vTotalRows || ' records');

    IF vTotalRows > 0 THEN
        DBMS_OUTPUT.PUT_LINE('SUCCESS: All source tables contain data');
    ELSE
        DBMS_OUTPUT.PUT_LINE('ERROR: No data found in source tables');
    END IF;
END;
/
|
||||||
|
|
||||||
|
-- Check 2: ETL key distribution analysis
|
||||||
|
PROMPT Checking ETL key distribution...
|
||||||
|
-- Report per-table and overall distinct A_ETL_LOAD_SET_FK key counts.
DECLARE
    vKeysAdmin     NUMBER := 0;  -- distinct keys in MPEC_ADMIN
    vKeysContent   NUMBER := 0;  -- distinct keys in MPEC_CONTENT
    vKeysCriterion NUMBER := 0;  -- distinct keys in MPEC_CONTENT_CRITERION
    vKeysOverall   NUMBER := 0;  -- distinct keys across all three tables
BEGIN
    EXECUTE IMMEDIATE 'SELECT COUNT(DISTINCT A_ETL_LOAD_SET_FK) FROM OU_LEGACY_C2D.MPEC_ADMIN' INTO vKeysAdmin;
    EXECUTE IMMEDIATE 'SELECT COUNT(DISTINCT A_ETL_LOAD_SET_FK) FROM OU_LEGACY_C2D.MPEC_CONTENT' INTO vKeysContent;
    EXECUTE IMMEDIATE 'SELECT COUNT(DISTINCT A_ETL_LOAD_SET_FK) FROM OU_LEGACY_C2D.MPEC_CONTENT_CRITERION' INTO vKeysCriterion;

    -- UNION (not UNION ALL) deduplicates keys shared between tables
    SELECT COUNT(DISTINCT etl_key)
      INTO vKeysOverall
      FROM (
               SELECT A_ETL_LOAD_SET_FK AS etl_key FROM OU_LEGACY_C2D.MPEC_ADMIN
               UNION
               SELECT A_ETL_LOAD_SET_FK FROM OU_LEGACY_C2D.MPEC_CONTENT
               UNION
               SELECT A_ETL_LOAD_SET_FK FROM OU_LEGACY_C2D.MPEC_CONTENT_CRITERION
           );

    DBMS_OUTPUT.PUT_LINE('ETL key distribution:');
    DBMS_OUTPUT.PUT_LINE('- MPEC_ADMIN distinct keys: ' || vKeysAdmin);
    DBMS_OUTPUT.PUT_LINE('- MPEC_CONTENT distinct keys: ' || vKeysContent);
    DBMS_OUTPUT.PUT_LINE('- MPEC_CONTENT_CRITERION distinct keys: ' || vKeysCriterion);
    DBMS_OUTPUT.PUT_LINE('- Total distinct ETL keys: ' || vKeysOverall);

    IF vKeysOverall > 0 THEN
        DBMS_OUTPUT.PUT_LINE('SUCCESS: ETL key distribution looks normal');
    ELSE
        DBMS_OUTPUT.PUT_LINE('ERROR: No ETL keys found in source data');
    END IF;
END;
/
|
||||||
|
|
||||||
|
-- Check 3: Template table compatibility verification
|
||||||
|
PROMPT Checking template table compatibility...
|
||||||
|
-- Confirm each CT_ET_TEMPLATES template table has a visible column structure.
-- The three identical ALL_TAB_COLUMNS queries are replaced by one loop over
-- the template names; output lines are unchanged.
DECLARE
    TYPE t_name_list IS TABLE OF VARCHAR2(128);
    -- Template tables to verify, in report order
    vTemplates  t_name_list := t_name_list('C2D_MPEC_ADMIN',
                                           'C2D_MPEC_CONTENT',
                                           'C2D_MPEC_CONTENT_CRITERION');
    vColCount   NUMBER;
    vAllDefined BOOLEAN := TRUE;
BEGIN
    DBMS_OUTPUT.PUT_LINE('Template table column counts:');
    FOR i IN 1 .. vTemplates.COUNT LOOP
        -- Zero columns in ALL_TAB_COLUMNS means the template is missing or
        -- not visible to the current user.
        SELECT COUNT(*)
          INTO vColCount
          FROM all_tab_columns
         WHERE owner = 'CT_ET_TEMPLATES'
           AND table_name = vTemplates(i);

        DBMS_OUTPUT.PUT_LINE('- ' || vTemplates(i) || ': ' || vColCount || ' columns');
        IF vColCount = 0 THEN
            vAllDefined := FALSE;
        END IF;
    END LOOP;

    IF vAllDefined THEN
        DBMS_OUTPUT.PUT_LINE('SUCCESS: All template tables have defined structure');
    ELSE
        DBMS_OUTPUT.PUT_LINE('ERROR: One or more template tables missing columns');
    END IF;
END;
/
|
||||||
|
|
||||||
|
-- Check 4: Verify A_ETL_LOAD_SET_FK values exist in A_LOAD_HISTORY
|
||||||
|
PROMPT Checking ETL key references in A_LOAD_HISTORY...
|
||||||
|
-- Validate that every distinct source ETL key is known to A_LOAD_HISTORY.
DECLARE
    vKeysMatched NUMBER := 0;  -- source keys found in A_LOAD_HISTORY
    vKeysTotal   NUMBER := 0;  -- distinct keys across the three source tables
BEGIN
    -- All distinct ETL keys referenced by any of the three source tables
    SELECT COUNT(DISTINCT etl_key)
      INTO vKeysTotal
      FROM (
               SELECT A_ETL_LOAD_SET_FK AS etl_key FROM OU_LEGACY_C2D.MPEC_ADMIN
               UNION
               SELECT A_ETL_LOAD_SET_FK FROM OU_LEGACY_C2D.MPEC_CONTENT
               UNION
               SELECT A_ETL_LOAD_SET_FK FROM OU_LEGACY_C2D.MPEC_CONTENT_CRITERION
           );

    -- Subset of those keys that A_LOAD_HISTORY knows about
    SELECT COUNT(DISTINCT etl_key)
      INTO vKeysMatched
      FROM (
               SELECT A_ETL_LOAD_SET_FK AS etl_key FROM OU_LEGACY_C2D.MPEC_ADMIN
               UNION
               SELECT A_ETL_LOAD_SET_FK FROM OU_LEGACY_C2D.MPEC_CONTENT
               UNION
               SELECT A_ETL_LOAD_SET_FK FROM OU_LEGACY_C2D.MPEC_CONTENT_CRITERION
           ) src
     WHERE EXISTS (
               SELECT 1 FROM CT_ODS.A_LOAD_HISTORY h
                WHERE h.A_ETL_LOAD_SET_KEY = src.etl_key
           );

    DBMS_OUTPUT.PUT_LINE('ETL key validation:');
    DBMS_OUTPUT.PUT_LINE('- Total distinct ETL keys in source: ' || vKeysTotal);
    DBMS_OUTPUT.PUT_LINE('- Valid keys (exist in A_LOAD_HISTORY): ' || vKeysMatched);

    IF vKeysMatched = vKeysTotal THEN
        DBMS_OUTPUT.PUT_LINE('SUCCESS: All source ETL keys are valid');
    ELSE
        DBMS_OUTPUT.PUT_LINE('ERROR: Some ETL keys may be invalid: ' || (vKeysTotal - vKeysMatched));
    END IF;
END;
/
|
||||||
|
|
||||||
|
PROMPT =====================================================================================
|
||||||
|
PROMPT MARS-956 Record Count Verification
|
||||||
|
PROMPT =====================================================================================
|
||||||
|
PROMPT Comparing source table counts with exported external table counts
|
||||||
|
PROMPT =====================================================================================
|
||||||
|
|
||||||
|
DECLARE
    -- MARS-956: compare row counts of each C2D MPEC source table against the
    -- external table its data was exported to, then summarize.
    TYPE t_table_info IS RECORD (
        source_schema  VARCHAR2(50),   -- owner of the source table
        source_table   VARCHAR2(100),  -- source table name
        external_table VARCHAR2(100),  -- exported external table (schema-qualified)
        description    VARCHAR2(200)   -- human-readable note for the mapping
    );
    TYPE t_table_list IS TABLE OF t_table_info;

    vTables           t_table_list;
    vSourceCount      NUMBER;
    vTargetCount      NUMBER;
    vTotalSourceCount NUMBER := 0;
    vTotalTargetCount NUMBER := 0;
    vMismatchCount    NUMBER := 0;  -- tables whose counts differ or errored
    vSql              VARCHAR2(4000);
    vFileCount        NUMBER := 0;  -- export files registered in the last hour
    -- NOTE: unused local vValidationResult removed.
BEGIN
    DBMS_OUTPUT.PUT_LINE('VERIFICATION TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS'));
    DBMS_OUTPUT.PUT_LINE('');

    -- Source table -> exported external table mapping for the C2D MPEC set.
    vTables := t_table_list(
        t_table_info('OU_LEGACY_C2D', 'MPEC_ADMIN', 'ODS.C2D_MPEC_ADMIN_ODS', 'MPEC Admin data (ETL keys 2001-2005)'),
        t_table_info('OU_LEGACY_C2D', 'MPEC_CONTENT', 'ODS.C2D_MPEC_CONTENT_ODS', 'MPEC Content data (ETL keys 2006-2008)'),
        t_table_info('OU_LEGACY_C2D', 'MPEC_CONTENT_CRITERION', 'ODS.C2D_MPEC_CONTENT_CRITERION_ODS', 'MPEC Criterion data (ETL keys 2009-2010)')
    );

    DBMS_OUTPUT.PUT_LINE('-----------------------------------------------------------------------------------------');
    DBMS_OUTPUT.PUT_LINE('Table Name Source Count Target Count Status');
    DBMS_OUTPUT.PUT_LINE('-----------------------------------------------------------------------------------------');

    FOR i IN 1 .. vTables.COUNT LOOP
        -- Source row count; dynamic SQL because schema/table come from the list.
        vSql := 'SELECT COUNT(*) FROM ' || vTables(i).source_schema || '.' || vTables(i).source_table;
        BEGIN
            EXECUTE IMMEDIATE vSql INTO vSourceCount;
            vTotalSourceCount := vTotalSourceCount + vSourceCount;
        EXCEPTION
            WHEN OTHERS THEN
                vSourceCount := -1;
                DBMS_OUTPUT.PUT_LINE(RPAD(vTables(i).source_table, 24) || 'ERROR: Cannot access source table');
                CONTINUE;  -- this table cannot be compared; skip target check
        END;

        -- Target external table row count.
        vSql := 'SELECT COUNT(*) FROM ' || vTables(i).external_table;
        BEGIN
            EXECUTE IMMEDIATE vSql INTO vTargetCount;
            vTotalTargetCount := vTotalTargetCount + vTargetCount;
        EXCEPTION
            WHEN OTHERS THEN
                -- Errors expected when the external table has no backing files:
                --   ORA-29913: error in executing ODCIEXTTABLEOPEN callout
                --   ORA-29400: data cartridge error
                --   KUP-13023: nothing matched wildcard query (no files in bucket)
                -- ORA-30653 (reject limit) is a real data quality error and is
                -- deliberately NOT treated as empty.
                IF vSourceCount = 0 OR SQLCODE IN (-29913, -29400) OR SQLERRM LIKE '%KUP-13023%' THEN
                    vTargetCount := 0; -- Treat as empty (no files exported yet)
                ELSE
                    vTargetCount := -1; -- Real error
                END IF;
        END;

        -- Render one comparison row with thousands separators.
        DECLARE
            vStatus        VARCHAR2(20);
            vSourceDisplay VARCHAR2(17);
            vTargetDisplay VARCHAR2(17);
        BEGIN
            -- vSourceCount >= 0 here: the -1 case CONTINUEd above, so the
            -- original "source ERROR" display branch was dead code (removed).
            vSourceDisplay := TO_CHAR(vSourceCount, '9,999,999,999');

            IF vTargetCount = -1 THEN
                vTargetDisplay := 'ERROR';
            ELSE
                vTargetDisplay := TO_CHAR(vTargetCount, '9,999,999,999');
            END IF;

            IF vSourceCount = vTargetCount THEN
                vStatus := 'PASS';
            ELSIF vTargetCount = -1 THEN
                vStatus := 'ERROR';
                vMismatchCount := vMismatchCount + 1;
            ELSE
                vStatus := 'MISMATCH';
                vMismatchCount := vMismatchCount + 1;
            END IF;

            DBMS_OUTPUT.PUT_LINE(
                RPAD(vTables(i).source_table, 24) ||
                LPAD(vSourceDisplay, 15) ||
                LPAD(vTargetDisplay, 15) || ' ' ||
                vStatus
            );
        END;
    END LOOP;

    DBMS_OUTPUT.PUT_LINE('-----------------------------------------------------------------------------------------');
    DBMS_OUTPUT.PUT_LINE(
        RPAD('TOTALS', 24) ||
        LPAD(TO_CHAR(vTotalSourceCount, '9,999,999,999'), 15) ||
        LPAD(TO_CHAR(vTotalTargetCount, '9,999,999,999'), 15)
    );
    DBMS_OUTPUT.PUT_LINE('-----------------------------------------------------------------------------------------');
    DBMS_OUTPUT.PUT_LINE('');

    -- Export files registered during the last hour; names start with the ETL
    -- key (2001..2010), hence the '200_%' pattern.
    SELECT COUNT(*)
      INTO vFileCount
      FROM CT_MRDS.A_SOURCE_FILE_RECEIVED
     WHERE RECEPTION_DATE >= SYSDATE - 1/24
       AND (SOURCE_FILE_NAME LIKE '200_%');

    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    DBMS_OUTPUT.PUT_LINE('Record Count Verification Summary');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    DBMS_OUTPUT.PUT_LINE('Total source records: ' || TO_CHAR(vTotalSourceCount, '9,999,999,999'));
    DBMS_OUTPUT.PUT_LINE('Total target records: ' || TO_CHAR(vTotalTargetCount, '9,999,999,999') || ' (exported to ODS)');
    DBMS_OUTPUT.PUT_LINE('Export files registered: ' || vFileCount);
    DBMS_OUTPUT.PUT_LINE('');

    IF vMismatchCount = 0 AND vFileCount > 0 THEN
        DBMS_OUTPUT.PUT_LINE('[PASS] VERIFICATION PASSED');
        DBMS_OUTPUT.PUT_LINE(' All record counts match between source and exported data');
        DBMS_OUTPUT.PUT_LINE(' Export completed successfully');
    ELSIF vMismatchCount > 0 THEN
        DBMS_OUTPUT.PUT_LINE('[INFO] VERIFICATION COMPLETED WITH MISMATCHES');
        DBMS_OUTPUT.PUT_LINE(' Found ' || vMismatchCount || ' table(s) with count mismatches');
        DBMS_OUTPUT.PUT_LINE(' NOTE: Mismatches may be caused by pre-existing files in buckets (see pre-check)');
        DBMS_OUTPUT.PUT_LINE(' Review export logs and pre-check results before re-running exports');
    ELSE
        DBMS_OUTPUT.PUT_LINE('[WARN] NO EXPORT DETECTED');
        DBMS_OUTPUT.PUT_LINE(' No files found in export registration');
        DBMS_OUTPUT.PUT_LINE(' Verify export execution completed successfully');
    END IF;
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    DBMS_OUTPUT.PUT_LINE('');

    DBMS_OUTPUT.PUT_LINE('Legend:');
    DBMS_OUTPUT.PUT_LINE(' PASS - Record counts match (export successful)');
    DBMS_OUTPUT.PUT_LINE(' MISMATCH - Record counts differ (may be pre-existing files or export issue)');
    DBMS_OUTPUT.PUT_LINE(' Check pre-check results to identify pre-existing files');
    DBMS_OUTPUT.PUT_LINE(' ERROR - Cannot access table (verify table exists and permissions)');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');

    -- Additional ETL key analysis for C2D MPEC data.
    DBMS_OUTPUT.PUT_LINE('');
    DBMS_OUTPUT.PUT_LINE('ETL Key Analysis:');

    DECLARE
        vAdminKeys     NUMBER;
        vContentKeys   NUMBER;
        vCriterionKeys NUMBER;
    BEGIN
        EXECUTE IMMEDIATE 'SELECT COUNT(DISTINCT A_ETL_LOAD_SET_FK) FROM OU_LEGACY_C2D.MPEC_ADMIN' INTO vAdminKeys;
        EXECUTE IMMEDIATE 'SELECT COUNT(DISTINCT A_ETL_LOAD_SET_FK) FROM OU_LEGACY_C2D.MPEC_CONTENT' INTO vContentKeys;
        EXECUTE IMMEDIATE 'SELECT COUNT(DISTINCT A_ETL_LOAD_SET_FK) FROM OU_LEGACY_C2D.MPEC_CONTENT_CRITERION' INTO vCriterionKeys;

        -- NOTE(review): the "expected" counts below (e.g. "3 for keys
        -- 2001-2005", a 5-key range) look inconsistent with the stated key
        -- ranges -- confirm against the export specification.
        DBMS_OUTPUT.PUT_LINE('- MPEC_ADMIN distinct ETL keys: ' || vAdminKeys || ' (expected: 3 for keys 2001-2005)');
        DBMS_OUTPUT.PUT_LINE('- MPEC_CONTENT distinct ETL keys: ' || vContentKeys || ' (expected: 3 for keys 2006-2008)');
        DBMS_OUTPUT.PUT_LINE('- MPEC_CONTENT_CRITERION distinct ETL keys: ' || vCriterionKeys || ' (expected: 2 for keys 2009-2010)');

        -- One export file is produced per distinct ETL key per table.
        DBMS_OUTPUT.PUT_LINE('- Expected export files: ' || (vAdminKeys + vContentKeys + vCriterionKeys));
        DBMS_OUTPUT.PUT_LINE('- Actual export files: ' || vFileCount);
    END;
END;
/
|
||||||
|
|
||||||
|
PROMPT =========================================================================
|
||||||
|
PROMPT Data Integrity Verification Completed
|
||||||
|
PROMPT =========================================================================
|
||||||
@@ -0,0 +1,167 @@
|
|||||||
|
--=============================================================================================================================
|
||||||
|
-- MARS-956 ROLLBACK: Delete Exported CSV Files from DATA Bucket
|
||||||
|
--=============================================================================================================================
|
||||||
|
-- Purpose: Delete exported CSV files from ODS/C2D bucket folders for MPEC tables
|
||||||
|
-- WARNING: This will permanently delete exported data files!
|
||||||
|
-- Author: Grzegorz Michalski
|
||||||
|
-- Date: 2026-02-12
|
||||||
|
-- Related: MARS-956 - C2D MPEC Data Export Rollback
|
||||||
|
--=============================================================================================================================
|
||||||
|
|
||||||
|
SET SERVEROUTPUT ON SIZE UNLIMITED
|
||||||
|
|
||||||
|
PROMPT ========================================================================
|
||||||
|
PROMPT ROLLBACK: Deleting C2D_MPEC_ADMIN exported files
|
||||||
|
PROMPT ========================================================================
|
||||||
|
PROMPT WARNING: This will delete files from:
|
||||||
|
PROMPT - DATA bucket: mrds_data_dev/ODS/C2D/C2D_MPEC_ADMIN/
|
||||||
|
PROMPT ========================================================================
|
||||||
|
|
||||||
|
DECLARE
    -- Delete every CSV the MARS-956 export registered for MPEC_ADMIN from the
    -- DATA bucket (folder ODS/C2D/C2D_MPEC_ADMIN/).
    vBucketBase VARCHAR2(500);   -- base URI of the DATA bucket
    vCred       VARCHAR2(100);   -- object-store credential name
    vDeleted    NUMBER := 0;     -- files actually removed
BEGIN
    vBucketBase := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('DATA');
    vCred       := CT_MRDS.ENV_MANAGER.gvCredentialName;

    DBMS_OUTPUT.PUT_LINE('Deleting C2D_MPEC_ADMIN files from DATA bucket...');
    DBMS_OUTPUT.PUT_LINE(' Using A_SOURCE_FILE_RECEIVED with PROCESS_NAME = ''MARS-956''');

    -- Drive the deletion from the registration table, not a bucket listing.
    FOR f IN (SELECT SOURCE_FILE_NAME AS object_name
                FROM CT_MRDS.A_SOURCE_FILE_RECEIVED
               WHERE PROCESS_NAME = 'MARS-956'
                 AND SOURCE_FILE_NAME LIKE '%MPEC_ADMIN%')
    LOOP
        BEGIN
            DBMS_CLOUD.DELETE_OBJECT(
                credential_name => vCred,
                object_uri      => vBucketBase || 'ODS/C2D/C2D_MPEC_ADMIN/' || f.object_name
            );
            vDeleted := vDeleted + 1;
            DBMS_OUTPUT.PUT_LINE(' Deleted: ' || f.object_name);
        EXCEPTION
            WHEN OTHERS THEN
                -- ORA-20404 (object not found): already gone, keep going.
                IF SQLCODE = -20404 THEN
                    DBMS_OUTPUT.PUT_LINE(' Skipped (not found): ' || f.object_name);
                ELSE
                    RAISE;
                END IF;
        END;
    END LOOP;

    IF vDeleted = 0 THEN
        DBMS_OUTPUT.PUT_LINE(' INFO: No files found to delete');
    END IF;

    DBMS_OUTPUT.PUT_LINE('SUCCESS: C2D_MPEC_ADMIN files deleted (' || vDeleted || ' file(s))');
END;
/
|
||||||
|
|
||||||
|
PROMPT ========================================================================
|
||||||
|
PROMPT ROLLBACK: Deleting C2D_MPEC_CONTENT exported files
|
||||||
|
PROMPT ========================================================================
|
||||||
|
PROMPT WARNING: This will delete files from:
|
||||||
|
PROMPT - DATA bucket: mrds_data_dev/ODS/C2D/C2D_MPEC_CONTENT/
|
||||||
|
PROMPT ========================================================================
|
||||||
|
|
||||||
|
DECLARE
    -- Delete every CSV the MARS-956 export registered for MPEC_CONTENT from
    -- the DATA bucket (folder ODS/C2D/C2D_MPEC_CONTENT/).
    vBucketBase VARCHAR2(500);   -- base URI of the DATA bucket
    vCred       VARCHAR2(100);   -- object-store credential name
    vDeleted    NUMBER := 0;     -- files actually removed
BEGIN
    vBucketBase := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('DATA');
    vCred       := CT_MRDS.ENV_MANAGER.gvCredentialName;

    DBMS_OUTPUT.PUT_LINE('Deleting C2D_MPEC_CONTENT files from DATA bucket...');
    DBMS_OUTPUT.PUT_LINE(' Using A_SOURCE_FILE_RECEIVED with PROCESS_NAME = ''MARS-956''');

    -- Drive the deletion from the registration table. The NOT LIKE clause
    -- keeps MPEC_CONTENT_CRITERION files for the dedicated block below.
    FOR f IN (SELECT SOURCE_FILE_NAME AS object_name
                FROM CT_MRDS.A_SOURCE_FILE_RECEIVED
               WHERE PROCESS_NAME = 'MARS-956'
                 AND SOURCE_FILE_NAME LIKE '%MPEC_CONTENT%'
                 AND SOURCE_FILE_NAME NOT LIKE '%CRITERION%')
    LOOP
        BEGIN
            DBMS_CLOUD.DELETE_OBJECT(
                credential_name => vCred,
                object_uri      => vBucketBase || 'ODS/C2D/C2D_MPEC_CONTENT/' || f.object_name
            );
            vDeleted := vDeleted + 1;
            DBMS_OUTPUT.PUT_LINE(' Deleted: ' || f.object_name);
        EXCEPTION
            WHEN OTHERS THEN
                -- ORA-20404 (object not found): already gone, keep going.
                IF SQLCODE = -20404 THEN
                    DBMS_OUTPUT.PUT_LINE(' Skipped (not found): ' || f.object_name);
                ELSE
                    RAISE;
                END IF;
        END;
    END LOOP;

    IF vDeleted = 0 THEN
        DBMS_OUTPUT.PUT_LINE(' INFO: No files found to delete');
    END IF;

    DBMS_OUTPUT.PUT_LINE('SUCCESS: C2D_MPEC_CONTENT files deleted (' || vDeleted || ' file(s))');
END;
/
|
||||||
|
|
||||||
|
PROMPT ========================================================================
|
||||||
|
PROMPT ROLLBACK: Deleting C2D_MPEC_CONTENT_CRITERION exported files
|
||||||
|
PROMPT ========================================================================
|
||||||
|
PROMPT WARNING: This will delete files from:
|
||||||
|
PROMPT - DATA bucket: mrds_data_dev/ODS/C2D/C2D_MPEC_CONTENT_CRITERION/
|
||||||
|
PROMPT ========================================================================
|
||||||
|
|
||||||
|
DECLARE
    -- Delete every CSV the MARS-956 export registered for
    -- MPEC_CONTENT_CRITERION from the DATA bucket
    -- (folder ODS/C2D/C2D_MPEC_CONTENT_CRITERION/).
    vBucketBase VARCHAR2(500);   -- base URI of the DATA bucket
    vCred       VARCHAR2(100);   -- object-store credential name
    vDeleted    NUMBER := 0;     -- files actually removed
BEGIN
    vBucketBase := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('DATA');
    vCred       := CT_MRDS.ENV_MANAGER.gvCredentialName;

    DBMS_OUTPUT.PUT_LINE('Deleting C2D_MPEC_CONTENT_CRITERION files from DATA bucket...');
    DBMS_OUTPUT.PUT_LINE(' Using A_SOURCE_FILE_RECEIVED with PROCESS_NAME = ''MARS-956''');

    -- Drive the deletion from the registration table, not a bucket listing.
    FOR f IN (SELECT SOURCE_FILE_NAME AS object_name
                FROM CT_MRDS.A_SOURCE_FILE_RECEIVED
               WHERE PROCESS_NAME = 'MARS-956'
                 AND SOURCE_FILE_NAME LIKE '%MPEC_CONTENT_CRITERION%')
    LOOP
        BEGIN
            DBMS_CLOUD.DELETE_OBJECT(
                credential_name => vCred,
                object_uri      => vBucketBase || 'ODS/C2D/C2D_MPEC_CONTENT_CRITERION/' || f.object_name
            );
            vDeleted := vDeleted + 1;
            DBMS_OUTPUT.PUT_LINE(' Deleted: ' || f.object_name);
        EXCEPTION
            WHEN OTHERS THEN
                -- ORA-20404 (object not found): already gone, keep going.
                IF SQLCODE = -20404 THEN
                    DBMS_OUTPUT.PUT_LINE(' Skipped (not found): ' || f.object_name);
                ELSE
                    RAISE;
                END IF;
        END;
    END LOOP;

    IF vDeleted = 0 THEN
        DBMS_OUTPUT.PUT_LINE(' INFO: No files found to delete');
    END IF;

    DBMS_OUTPUT.PUT_LINE('SUCCESS: C2D_MPEC_CONTENT_CRITERION files deleted (' || vDeleted || ' file(s))');
END;
/
|
||||||
|
|
||||||
|
PROMPT SUCCESS: All CSV file deletion operations completed
|
||||||
@@ -0,0 +1,78 @@
|
|||||||
|
-- ===================================================================
|
||||||
|
-- MARS-956 Rollback Step 1: Delete File Registrations
|
||||||
|
-- ===================================================================
|
||||||
|
-- Purpose: Remove MARS-956 export file registrations from A_SOURCE_FILE_RECEIVED
|
||||||
|
-- Author: Grzegorz Michalski
|
||||||
|
-- Date: 2026-02-12
|
||||||
|
|
||||||
|
SET SERVEROUTPUT ON SIZE UNLIMITED
|
||||||
|
SET TIMING ON
|
||||||
|
|
||||||
|
PROMPT =========================================================================
|
||||||
|
PROMPT MARS-956 Rollback Step 1: Delete File Registrations
|
||||||
|
PROMPT =========================================================================
|
||||||
|
|
||||||
|
DECLARE
    -- Rollback step 1: remove MARS-956 export registrations from
    -- CT_MRDS.A_SOURCE_FILE_RECEIVED and log the action.
    vToDelete NUMBER := 0;        -- registrations matching the rollback filter
    vRemoved  NUMBER := 0;        -- rows actually deleted
    vErrMsg   VARCHAR2(4000);
BEGIN
    -- Same filter as the DELETE below, so the preview matches the action.
    SELECT COUNT(*)
      INTO vToDelete
      FROM CT_MRDS.A_SOURCE_FILE_RECEIVED
     WHERE PROCESS_NAME = 'MARS-956';

    DBMS_OUTPUT.PUT_LINE('Files to be deleted: ' || vToDelete);
    DBMS_OUTPUT.PUT_LINE('Using PROCESS_NAME = ''MARS-956'' filter');

    IF vToDelete = 0 THEN
        DBMS_OUTPUT.PUT_LINE('SUCCESS: No file registrations found to delete');
    ELSE
        -- Echo each row before removing it.
        DBMS_OUTPUT.PUT_LINE('Files being removed:');
        FOR f IN (SELECT A_SOURCE_FILE_RECEIVED_KEY,
                         SUBSTR(SOURCE_FILE_NAME, 1, 60) AS FILE_NAME,
                         TO_CHAR(RECEPTION_DATE, 'YYYY-MM-DD HH24:MI:SS') AS RECEIVED_TIME,
                         PROCESS_NAME
                    FROM CT_MRDS.A_SOURCE_FILE_RECEIVED
                   WHERE PROCESS_NAME = 'MARS-956'
                   ORDER BY RECEPTION_DATE DESC)
        LOOP
            DBMS_OUTPUT.PUT_LINE('- ' || f.FILE_NAME || ' (ID: ' || f.A_SOURCE_FILE_RECEIVED_KEY || ', Process: ' || f.PROCESS_NAME || ')');
        END LOOP;

        DELETE FROM CT_MRDS.A_SOURCE_FILE_RECEIVED
         WHERE PROCESS_NAME = 'MARS-956';

        vRemoved := SQL%ROWCOUNT;
        COMMIT;

        DBMS_OUTPUT.PUT_LINE('SUCCESS: Successfully deleted ' || vRemoved || ' file registrations');

        -- Record the rollback action in the process log.
        INSERT INTO CT_MRDS.A_PROCESS_LOG (PROCESS_NAME, PROCEDURE_NAME, LOG_LEVEL, LOG_MESSAGE)
        VALUES ('MARS-956-ROLLBACK', 'DELETE_FILE_REGISTRATIONS', 'INFO',
                'Deleted ' || vRemoved || ' file registrations');
        COMMIT;
    END IF;

EXCEPTION
    WHEN OTHERS THEN
        ROLLBACK;
        vErrMsg := 'Failed to delete file registrations: ' || SQLERRM;
        DBMS_OUTPUT.PUT_LINE('ERROR: Error during file registration deletion: ' || SQLERRM);
        -- Keep an ERROR entry even though the transaction was rolled back.
        INSERT INTO CT_MRDS.A_PROCESS_LOG (PROCESS_NAME, PROCEDURE_NAME, LOG_LEVEL, LOG_MESSAGE)
        VALUES ('MARS-956-ROLLBACK', 'DELETE_FILE_REGISTRATIONS', 'ERROR', vErrMsg);
        COMMIT;
        RAISE;
END;
/
|
||||||
|
|
||||||
|
PROMPT =========================================================================
|
||||||
|
PROMPT File Registration Rollback Completed
|
||||||
|
PROMPT =========================================================================
|
||||||
@@ -0,0 +1,77 @@
|
|||||||
|
-- ===================================================================
|
||||||
|
-- MARS-956 Rollback Step 2: Clean Process Logs
|
||||||
|
-- ===================================================================
|
||||||
|
-- Purpose: Remove MARS-956 process logs from A_PROCESS_LOG
|
||||||
|
-- Author: Grzegorz Michalski
|
||||||
|
-- Date: 2026-02-12
|
||||||
|
|
||||||
|
SET SERVEROUTPUT ON SIZE UNLIMITED
|
||||||
|
SET TIMING ON
|
||||||
|
|
||||||
|
PROMPT =========================================================================
|
||||||
|
PROMPT MARS-956 Rollback Step 2: Clean Process Logs
|
||||||
|
PROMPT =========================================================================
|
||||||
|
|
||||||
|
DECLARE
    -- Rollback step 2: purge MARS-956 entries from CT_MRDS.A_PROCESS_LOG,
    -- restricted to the last week as a safety window.
    vMatches NUMBER := 0;        -- log rows matching the cleanup filter
    vRemoved NUMBER := 0;        -- rows actually deleted
    vErrMsg  VARCHAR2(4000);
BEGIN
    SELECT COUNT(*)
      INTO vMatches
      FROM CT_MRDS.A_PROCESS_LOG
     WHERE PROCESS_NAME IN ('MARS-956', 'MARS-956-ROLLBACK')
       AND LOG_TIMESTAMP >= SYSDATE - 7; -- last week only (safety)

    DBMS_OUTPUT.PUT_LINE('Process log entries to be deleted: ' || vMatches);

    IF vMatches = 0 THEN
        DBMS_OUTPUT.PUT_LINE('SUCCESS: No process log entries found to delete');
    ELSE
        -- Preview the most recent entries before removal.
        DBMS_OUTPUT.PUT_LINE('Recent MARS-956 log entries being removed:');
        FOR e IN (SELECT A_PROCESS_LOG_KEY,
                         TO_CHAR(LOG_TIMESTAMP, 'YYYY-MM-DD HH24:MI:SS') AS LOG_TIME,
                         PROCEDURE_NAME,
                         LOG_LEVEL,
                         SUBSTR(LOG_MESSAGE, 1, 40) AS MESSAGE
                    FROM CT_MRDS.A_PROCESS_LOG
                   WHERE PROCESS_NAME IN ('MARS-956', 'MARS-956-ROLLBACK')
                     AND LOG_TIMESTAMP >= SYSDATE - 7
                   ORDER BY LOG_TIMESTAMP DESC
                   FETCH FIRST 10 ROWS ONLY)
        LOOP
            DBMS_OUTPUT.PUT_LINE('- ' || e.LOG_TIME || ' [' || e.LOG_LEVEL || '] ' ||
                                 e.PROCEDURE_NAME || ': ' || e.MESSAGE);
        END LOOP;

        DELETE FROM CT_MRDS.A_PROCESS_LOG
         WHERE PROCESS_NAME IN ('MARS-956', 'MARS-956-ROLLBACK')
           AND LOG_TIMESTAMP >= SYSDATE - 7;

        vRemoved := SQL%ROWCOUNT;
        COMMIT;

        DBMS_OUTPUT.PUT_LINE('SUCCESS: Successfully deleted ' || vRemoved || ' process log entries');
    END IF;

EXCEPTION
    WHEN OTHERS THEN
        ROLLBACK;
        vErrMsg := 'Failed to clean process logs: ' || SQLERRM;
        DBMS_OUTPUT.PUT_LINE('ERROR: Error during process log cleanup: ' || SQLERRM);
        -- This ERROR entry survives the rollback for later debugging.
        INSERT INTO CT_MRDS.A_PROCESS_LOG (PROCESS_NAME, PROCEDURE_NAME, LOG_LEVEL, LOG_MESSAGE)
        VALUES ('MARS-956-ROLLBACK', 'CLEANUP_PROCESS_LOGS', 'ERROR', vErrMsg);
        COMMIT;
        RAISE;
END;
/
|
||||||
|
|
||||||
|
PROMPT =========================================================================
|
||||||
|
PROMPT Process Log Cleanup Completed
|
||||||
|
PROMPT =========================================================================
|
||||||
@@ -0,0 +1,207 @@
|
|||||||
|
-- ===================================================================
|
||||||
|
-- MARS-956 Rollback Verification: Confirm Rollback Completion
|
||||||
|
-- ===================================================================
|
||||||
|
-- Purpose: Verify that MARS-956 rollback completed successfully
|
||||||
|
-- Author: Grzegorz Michalski
|
||||||
|
-- Date: 2026-02-12
|
||||||
|
|
||||||
|
SET SERVEROUTPUT ON SIZE UNLIMITED
|
||||||
|
SET TIMING ON
|
||||||
|
|
||||||
|
PROMPT =========================================================================
|
||||||
|
PROMPT MARS-956 Rollback Verification
|
||||||
|
PROMPT =========================================================================
|
||||||
|
|
||||||
|
-- Check 1: Verify file registrations were removed
|
||||||
|
PROMPT Checking file registration cleanup...
|
||||||
|
DECLARE
    -- Check 1: no MARS-956 file registrations should remain after rollback.
    vRemainingFiles NUMBER := 0;
BEGIN
    -- FIX: filter on PROCESS_NAME = 'MARS-956', matching the filter the
    -- rollback DELETE used. The previous SOURCE_FILE_NAME LIKE '200%'
    -- heuristic could also match files registered by unrelated processes
    -- and report false warnings.
    SELECT COUNT(*)
      INTO vRemainingFiles
      FROM CT_MRDS.A_SOURCE_FILE_RECEIVED
     WHERE PROCESS_NAME = 'MARS-956'
       AND RECEPTION_DATE >= SYSDATE - 7; -- last week (safety window)

    IF vRemainingFiles = 0 THEN
        DBMS_OUTPUT.PUT_LINE('SUCCESS: All MARS-956 file registrations successfully removed');
    ELSE
        DBMS_OUTPUT.PUT_LINE('WARNING: ' || vRemainingFiles || ' file registrations still exist');

        -- List the survivors so the operator can clean up manually.
        FOR rec IN (
            SELECT SUBSTR(SOURCE_FILE_NAME, 1, 50) AS FILE_NAME,
                   TO_CHAR(RECEPTION_DATE, 'YYYY-MM-DD HH24:MI:SS') AS RECEIVED_TIME
              FROM CT_MRDS.A_SOURCE_FILE_RECEIVED
             WHERE PROCESS_NAME = 'MARS-956'
               AND RECEPTION_DATE >= SYSDATE - 7
        ) LOOP
            DBMS_OUTPUT.PUT_LINE(' Remaining: ' || rec.FILE_NAME);
        END LOOP;
    END IF;
END;
/
|
||||||
|
|
||||||
|
-- Check 2: Verify process logs were cleaned
|
||||||
|
PROMPT Checking process log cleanup...
|
||||||
|
DECLARE
    -- Check 2: no MARS-956 process log entries should remain after cleanup.
    vLeftover NUMBER := 0;   -- MARS-956 log rows that survived the cleanup
BEGIN
    SELECT COUNT(*)
      INTO vLeftover
      FROM CT_MRDS.A_PROCESS_LOG
     WHERE PROCESS_NAME = 'MARS-956'
       AND LOG_TIMESTAMP >= SYSDATE - 7; -- same window the cleanup used

    IF vLeftover = 0 THEN
        DBMS_OUTPUT.PUT_LINE('SUCCESS: All MARS-956 process logs successfully removed');
    ELSE
        DBMS_OUTPUT.PUT_LINE('WARNING: ' || vLeftover || ' process log entries still exist');

        -- Show a small sample of the surviving entries.
        FOR e IN (SELECT TO_CHAR(LOG_TIMESTAMP, 'YYYY-MM-DD HH24:MI:SS') AS LOG_TIME,
                         PROCEDURE_NAME,
                         SUBSTR(LOG_MESSAGE, 1, 40) AS MESSAGE
                    FROM CT_MRDS.A_PROCESS_LOG
                   WHERE PROCESS_NAME = 'MARS-956'
                     AND LOG_TIMESTAMP >= SYSDATE - 7
                   ORDER BY LOG_TIMESTAMP DESC
                   FETCH FIRST 3 ROWS ONLY)
        LOOP
            DBMS_OUTPUT.PUT_LINE(' Remaining: ' || e.LOG_TIME || ' ' || e.PROCEDURE_NAME);
        END LOOP;
    END IF;
END;
/
|
||||||
|
|
||||||
|
-- Check 3: Verify cloud bucket cleanup (informational only)
|
||||||
|
PROMPT Checking cloud bucket status...
|
||||||
|
DECLARE
    -- Check 3 (informational): count C2D MPEC files still in the object store.
    vCloudFileCount NUMBER := 0;
    vCredentialName VARCHAR2(100);
    vDataBucketUri  VARCHAR2(500);
BEGIN
    -- FIX: list the DATA bucket. The export and the deletion blocks both use
    -- GET_BUCKET_URI('DATA') with an 'ODS/C2D/...' object prefix; the
    -- previous GET_BUCKET_URI('ODS') call inspected a different bucket, so
    -- this check could report "clean" while files remained.
    vDataBucketUri  := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('DATA');
    vCredentialName := CT_MRDS.ENV_MANAGER.gvCredentialName;

    DBMS_OUTPUT.PUT_LINE('Checking DATA bucket: ' || vDataBucketUri);

    -- Count remaining files; LIST_OBJECTS may fail (credentials, network),
    -- so wrap it and degrade to a warning.
    BEGIN
        FOR rec IN (
            SELECT object_name
              FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
                       credential_name => vCredentialName,
                       location_uri    => vDataBucketUri
                   ))
             WHERE object_name LIKE 'ODS/C2D/C2D_MPEC_%'
        ) LOOP
            vCloudFileCount := vCloudFileCount + 1;
            IF vCloudFileCount <= 3 THEN -- Show first 3 files
                DBMS_OUTPUT.PUT_LINE(' Cloud file: ' || rec.object_name);
            END IF;
        END LOOP;

        IF vCloudFileCount = 0 THEN
            DBMS_OUTPUT.PUT_LINE('SUCCESS: No C2D MPEC files found in cloud bucket');
        ELSE
            DBMS_OUTPUT.PUT_LINE('INFO: ' || vCloudFileCount || ' C2D MPEC files still in cloud bucket');
            DBMS_OUTPUT.PUT_LINE(' Note: Cloud files are not automatically deleted by rollback');
            DBMS_OUTPUT.PUT_LINE(' Manual deletion required if needed');
        END IF;

    EXCEPTION
        WHEN OTHERS THEN
            DBMS_OUTPUT.PUT_LINE('WARNING: Cannot check cloud bucket: ' || SQLERRM);
    END;
END;
/
|
||||||
|
|
||||||
|
-- Check 4: Verify rollback logs were created
|
||||||
|
PROMPT Checking rollback operation logs...
|
||||||
|
DECLARE
    -- Check 4: the rollback steps themselves should have logged under
    -- PROCESS_NAME 'MARS-956-ROLLBACK' within the last hour.
    vRollbackLogs NUMBER := 0;
BEGIN
    SELECT COUNT(*)
      INTO vRollbackLogs
      FROM CT_MRDS.A_PROCESS_LOG
     WHERE PROCESS_NAME = 'MARS-956-ROLLBACK'
       AND LOG_TIMESTAMP >= SYSDATE - 1/24; -- Last hour

    IF vRollbackLogs > 0 THEN
        DBMS_OUTPUT.PUT_LINE('SUCCESS: Rollback operation logs found: ' || vRollbackLogs);

        -- Show the recent rollback log entries.
        FOR rec IN (
            SELECT TO_CHAR(LOG_TIMESTAMP, 'HH24:MI:SS') AS LOG_TIME,
                   PROCEDURE_NAME,
                   LOG_LEVEL,
                   SUBSTR(LOG_MESSAGE, 1, 50) AS MESSAGE
              FROM CT_MRDS.A_PROCESS_LOG
             WHERE PROCESS_NAME = 'MARS-956-ROLLBACK'
               AND LOG_TIMESTAMP >= SYSDATE - 1/24
             ORDER BY LOG_TIMESTAMP DESC
        ) LOOP
            DBMS_OUTPUT.PUT_LINE(' ' || rec.LOG_TIME || ' [' || rec.LOG_LEVEL || '] ' ||
                                 rec.PROCEDURE_NAME || ': ' || rec.MESSAGE);
        END LOOP;
    ELSE
        -- FIX: message previously read 'WARNING: Warning: ...' (word doubled).
        DBMS_OUTPUT.PUT_LINE('WARNING: No rollback operation logs found');
    END IF;
END;
/
|
||||||
|
|
||||||
|
PROMPT
|
||||||
|
PROMPT =========================================================================
|
||||||
|
PROMPT MARS-956 Rollback Verification Summary
|
||||||
|
PROMPT =========================================================================
|
||||||
|
|
||||||
|
DECLARE
    -- Summary: combine the file-registration and process-log checks into a
    -- single COMPLETE / PARTIAL / INCOMPLETE rollback status.
    vRemainingFiles NUMBER := 0;
    vRemainingLogs  NUMBER := 0;
    vRollbackStatus VARCHAR2(20);
BEGIN
    -- FIX: count registrations by PROCESS_NAME = 'MARS-956' (the filter the
    -- rollback DELETE used) instead of the SOURCE_FILE_NAME LIKE '200%'
    -- heuristic, which can match files from unrelated processes.
    SELECT COUNT(*)
      INTO vRemainingFiles
      FROM CT_MRDS.A_SOURCE_FILE_RECEIVED
     WHERE PROCESS_NAME = 'MARS-956'
       AND RECEPTION_DATE >= SYSDATE - 7;

    -- Remaining MARS-956 process log entries (same window as the cleanup).
    SELECT COUNT(*)
      INTO vRemainingLogs
      FROM CT_MRDS.A_PROCESS_LOG
     WHERE PROCESS_NAME = 'MARS-956'
       AND LOG_TIMESTAMP >= SYSDATE - 7;

    -- Both clean -> COMPLETE; one clean -> PARTIAL; neither -> INCOMPLETE.
    IF vRemainingFiles = 0 AND vRemainingLogs = 0 THEN
        vRollbackStatus := 'COMPLETE';
    ELSIF vRemainingFiles = 0 OR vRemainingLogs = 0 THEN
        vRollbackStatus := 'PARTIAL';
    ELSE
        vRollbackStatus := 'INCOMPLETE';
    END IF;

    DBMS_OUTPUT.PUT_LINE('MARS-956 Rollback Status: ' || vRollbackStatus);
    DBMS_OUTPUT.PUT_LINE('- Remaining file registrations: ' || vRemainingFiles);
    DBMS_OUTPUT.PUT_LINE('- Remaining process logs: ' || vRemainingLogs);

    IF vRollbackStatus = 'COMPLETE' THEN
        DBMS_OUTPUT.PUT_LINE('SUCCESS: Rollback completed successfully - system clean');
    ELSE
        DBMS_OUTPUT.PUT_LINE('WARNING: Rollback incomplete - manual cleanup may be required');
    END IF;

    DBMS_OUTPUT.PUT_LINE('');
    DBMS_OUTPUT.PUT_LINE('Note: Cloud bucket files (OCI) are not automatically removed');
    DBMS_OUTPUT.PUT_LINE(' Use OCI console or DBMS_CLOUD commands for file deletion if needed');
END;
/
|
||||||
|
|
||||||
|
PROMPT =========================================================================
|
||||||
|
PROMPT Rollback Verification Completed
|
||||||
|
PROMPT =========================================================================
|
||||||
88
MARS_Packages/REL02_POST/MARS-956/install_mars956.sql
Normal file
88
MARS_Packages/REL02_POST/MARS-956/install_mars956.sql
Normal file
@@ -0,0 +1,88 @@
-- ===================================================================
-- MARS-956 INSTALL SCRIPT: C2D MPEC Data Export to External Tables
-- ===================================================================
-- Purpose: One-time bulk export of 3 C2D MPEC tables from OU_LEGACY_C2D schema
--          to OCI buckets (ODS bucket CSV format)
-- Uses DATA_EXPORTER v2.7.5 with pRegisterExport for file registration
-- Author: Grzegorz Michalski
-- Date: 2026-02-12

-- Dynamic spool file generation (using SYS_CONTEXT - no DBA privileges required)
-- Log files are automatically created in log/ subdirectory
-- IMPORTANT: Ensure log/ directory exists before SPOOL (use host mkdir)
-- NOTE(review): "2>nul" is Windows cmd.exe syntax; on Unix clients this would
-- need "2>/dev/null" -- confirm the target client platform.
host mkdir log 2>nul

-- Bind variable must hold the full spool path:
--   'log/INSTALL_MARS_956_' (21) + CON_NAME (up to 128 bytes on 12.2+)
--   + '_' + timestamp (15) + '.log' (4)  =>  up to ~169 chars.
-- VARCHAR2(100) could raise ORA-06502 on long container names, so size 255.
var filename VARCHAR2(255)
BEGIN
    -- Build a per-run, per-container log file name, e.g.
    -- log/INSTALL_MARS_956_MYPDB_20260212_093000.log
    :filename := 'log/INSTALL_MARS_956_' || SYS_CONTEXT('USERENV', 'CON_NAME') || '_' || TO_CHAR(SYSDATE,'YYYYMMDD_HH24MISS') || '.log';
END;
/
-- Transfer the bind value into a substitution variable usable by SPOOL.
column filename new_value _filename
select :filename filename from dual;
spool &_filename

SET ECHO OFF
SET TIMING ON
SET SERVEROUTPUT ON SIZE UNLIMITED
SET PAUSE OFF

-- Set current schema context (optional - use when modifying packages in specific schema)
-- ALTER SESSION SET CURRENT_SCHEMA = CT_MRDS;

PROMPT =========================================================================
PROMPT MARS-956: C2D MPEC Data Export to External Tables (One-Time Migration)
PROMPT =========================================================================
PROMPT
PROMPT This script will export 3 C2D MPEC tables to OCI buckets:
PROMPT
PROMPT TARGET: ODS Bucket (CSV format):
PROMPT - MPEC_ADMIN
PROMPT - MPEC_CONTENT
PROMPT - MPEC_CONTENT_CRITERION
PROMPT
PROMPT Key Features:
PROMPT - Files registered in A_SOURCE_FILE_RECEIVED for tracking
PROMPT - Template table column order matching (CT_ET_TEMPLATES.C2D_MPEC_*)
PROMPT - ODS/C2D bucket path structure
PROMPT =========================================================================

-- Confirm installation with user
ACCEPT continue CHAR PROMPT 'Type YES to continue with installation, or Ctrl+C to abort: '
WHENEVER SQLERROR EXIT SQL.SQLCODE
BEGIN
    -- Abort unless the user typed YES (case-insensitive).
    -- The explicit IS NULL checks matter: NULL != 'YES' evaluates to
    -- UNKNOWN in three-valued logic, so an empty answer would otherwise
    -- bypass the guard entirely.
    IF '&continue' IS NULL OR TRIM('&continue') IS NULL OR UPPER(TRIM('&continue')) != 'YES' THEN
        RAISE_APPLICATION_ERROR(-20001, 'Installation aborted by user');
    END IF;
END;
/
WHENEVER SQLERROR CONTINUE

PROMPT
PROMPT =========================================================================
PROMPT Step 1: Export C2D MPEC Data to ODS Bucket
PROMPT =========================================================================
@@01_MARS_956_export_c2d_mpec_data.sql

PROMPT
PROMPT =========================================================================
PROMPT Step 2: Verify Exports (File Registration Check)
PROMPT =========================================================================
@@02_MARS_956_verify_exports.sql

PROMPT
PROMPT =========================================================================
PROMPT Step 3: Verify Data Integrity (Source vs Exported)
PROMPT =========================================================================
@@03_MARS_956_verify_data_integrity.sql

PROMPT
PROMPT =========================================================================
PROMPT MARS-956 Installation - COMPLETED
PROMPT =========================================================================
PROMPT Check the log file for complete installation details.
PROMPT For rollback, use: rollback_mars956.sql
PROMPT =========================================================================

spool off

quit;
81
MARS_Packages/REL02_POST/MARS-956/rollback_mars956.sql
Normal file
81
MARS_Packages/REL02_POST/MARS-956/rollback_mars956.sql
Normal file
@@ -0,0 +1,81 @@
-- ===================================================================
-- MARS-956 ROLLBACK SCRIPT: C2D MPEC Data Export Rollback
-- ===================================================================
-- Purpose: Rollback MARS-956 - Delete exported CSV files and file registrations
-- WARNING: This will DELETE all exported data files and registrations!
-- Author: Grzegorz Michalski
-- Date: 2026-02-12

-- Dynamic spool file generation (using SYS_CONTEXT - no DBA privileges required)
-- IMPORTANT: Ensure log/ directory exists before SPOOL (use host mkdir)
-- NOTE(review): "2>nul" is Windows cmd.exe syntax; on Unix clients this would
-- need "2>/dev/null" -- confirm the target client platform.
host mkdir log 2>nul

-- Bind variable must hold the full spool path:
--   'log/ROLLBACK_MARS_956_' (22) + CON_NAME (up to 128 bytes on 12.2+)
--   + '_' + timestamp (15) + '.log' (4)  =>  up to ~170 chars.
-- VARCHAR2(100) could raise ORA-06502 on long container names, so size 255.
var filename VARCHAR2(255)
BEGIN
    -- Build a per-run, per-container log file name, e.g.
    -- log/ROLLBACK_MARS_956_MYPDB_20260212_093000.log
    :filename := 'log/ROLLBACK_MARS_956_' || SYS_CONTEXT('USERENV', 'CON_NAME') || '_' || TO_CHAR(SYSDATE,'YYYYMMDD_HH24MISS') || '.log';
END;
/
-- Transfer the bind value into a substitution variable usable by SPOOL.
column filename new_value _filename
select :filename filename from dual;
spool &_filename

SET ECHO OFF
SET TIMING ON
SET SERVEROUTPUT ON SIZE UNLIMITED
SET PAUSE OFF

PROMPT =========================================================================
PROMPT MARS-956: Rollback C2D MPEC Data Export
PROMPT =========================================================================
PROMPT WARNING: This will DELETE exported CSV files and file registrations!
PROMPT - ODS bucket: mrds_data_dev/ODS/C2D/
PROMPT - File registrations: A_SOURCE_FILE_RECEIVED entries
PROMPT
PROMPT Only proceed if export failed and needs to be restarted!
PROMPT =========================================================================

-- Confirm rollback with user
ACCEPT continue CHAR PROMPT 'Type YES to continue with rollback, or Ctrl+C to abort: '
WHENEVER SQLERROR EXIT SQL.SQLCODE
BEGIN
    -- Abort unless the user typed YES (case-insensitive).
    -- The explicit IS NULL checks matter: NULL != 'YES' evaluates to
    -- UNKNOWN in three-valued logic, so an empty answer would otherwise
    -- bypass the guard entirely.
    IF '&continue' IS NULL OR TRIM('&continue') IS NULL OR UPPER(TRIM('&continue')) != 'YES' THEN
        RAISE_APPLICATION_ERROR(-20001, 'Rollback aborted by user');
    END IF;
END;
/
WHENEVER SQLERROR CONTINUE

PROMPT
PROMPT =========================================================================
PROMPT Step 1: Delete Exported CSV Files from ODS Bucket
PROMPT =========================================================================
@@90_MARS_956_rollback_delete_csv_files.sql

PROMPT
PROMPT =========================================================================
PROMPT Step 2: Delete File Registrations
PROMPT =========================================================================
@@91_MARS_956_rollback_file_registrations.sql

PROMPT
PROMPT =========================================================================
PROMPT Step 3: Clean Process Logs
PROMPT =========================================================================
@@92_MARS_956_rollback_process_logs.sql

PROMPT
PROMPT =========================================================================
PROMPT Step 4: Verify Rollback Completion
PROMPT =========================================================================
@@99_MARS_956_verify_rollback.sql

PROMPT
PROMPT =========================================================================
PROMPT MARS-956 Rollback - COMPLETED
PROMPT =========================================================================
PROMPT Check the log file for complete rollback details.
PROMPT =========================================================================

spool off

quit;
@@ -1,16 +1,16 @@
|
|||||||
--=============================================================================================================================
|
--=============================================================================================================================
|
||||||
-- MARS-1057: Install FILE_MANAGER Package Specification v3.4.0
|
-- MARS-1057: Install FILE_MANAGER Package Specification v3.5.0
|
||||||
--=============================================================================================================================
|
--=============================================================================================================================
|
||||||
-- Purpose: Deploy FILE_MANAGER package specification with new batch external table creation procedures
|
-- Purpose: Deploy FILE_MANAGER package specification with batch external table creation and area filter functionality
|
||||||
-- Author: Grzegorz Michalski
|
-- Author: Grzegorz Michalski
|
||||||
-- Date: 2025-11-27
|
-- Date: 2026-02-18
|
||||||
-- Related: MARS-1057 Batch External Table Creation
|
-- Related: MARS-1057 Batch External Table Creation + Area Filter Enhancement
|
||||||
--=============================================================================================================================
|
--=============================================================================================================================
|
||||||
|
|
||||||
SET SERVEROUTPUT ON
|
SET SERVEROUTPUT ON
|
||||||
|
|
||||||
PROMPT ========================================================================
|
PROMPT ========================================================================
|
||||||
PROMPT Installing FILE_MANAGER Package Specification v3.4.0
|
PROMPT Installing FILE_MANAGER Package Specification v3.5.0
|
||||||
PROMPT ========================================================================
|
PROMPT ========================================================================
|
||||||
|
|
||||||
@@new_version/FILE_MANAGER.pkg
|
@@new_version/FILE_MANAGER.pkg
|
||||||
|
|||||||
@@ -1,16 +1,16 @@
|
|||||||
--=============================================================================================================================
|
--=============================================================================================================================
|
||||||
-- MARS-1057: Install FILE_MANAGER Package Body v3.4.0
|
-- MARS-1057: Install FILE_MANAGER Package Body v3.5.0
|
||||||
--=============================================================================================================================
|
--=============================================================================================================================
|
||||||
-- Purpose: Deploy FILE_MANAGER package body with implementation of batch external table creation procedures
|
-- Purpose: Deploy FILE_MANAGER package body with batch external table creation and area filter functionality
|
||||||
-- Author: Grzegorz Michalski
|
-- Author: Grzegorz Michalski
|
||||||
-- Date: 2025-11-27
|
-- Date: 2026-02-18
|
||||||
-- Related: MARS-1057 Batch External Table Creation
|
-- Related: MARS-1057 Batch External Table Creation + Area Filter Enhancement
|
||||||
--=============================================================================================================================
|
--=============================================================================================================================
|
||||||
|
|
||||||
SET SERVEROUTPUT ON
|
SET SERVEROUTPUT ON
|
||||||
|
|
||||||
PROMPT ========================================================================
|
PROMPT ========================================================================
|
||||||
PROMPT Installing FILE_MANAGER Package Body v3.4.0
|
PROMPT Installing FILE_MANAGER Package Body v3.5.0
|
||||||
PROMPT ========================================================================
|
PROMPT ========================================================================
|
||||||
|
|
||||||
@@new_version/FILE_MANAGER.pkb
|
@@new_version/FILE_MANAGER.pkb
|
||||||
|
|||||||
@@ -1,17 +1,17 @@
|
|||||||
-- ===================================================================
|
-- ===================================================================
|
||||||
-- MARS-1057: Install ODS.FILE_MANAGER_ODS Package Specification
|
-- MARS-1057: Install ODS.FILE_MANAGER_ODS Package Specification
|
||||||
-- ===================================================================
|
-- ===================================================================
|
||||||
-- Purpose: Deploy FILE_MANAGER_ODS package specification v2.2.0
|
-- Purpose: Deploy FILE_MANAGER_ODS package specification v2.4.0
|
||||||
-- Author: Grzegorz Michalski
|
-- Author: Grzegorz Michalski
|
||||||
-- Date: 2025-11-27
|
-- Date: 2026-02-18
|
||||||
-- Package: ODS.FILE_MANAGER_ODS
|
-- Package: ODS.FILE_MANAGER_ODS
|
||||||
-- Version: 2.1.0 -> 2.2.0
|
-- Version: 2.1.0 -> 2.4.0
|
||||||
-- Changes: Added CREATE_EXTERNAL_TABLES_SET and CREATE_EXTERNAL_TABLES_BATCH wrappers
|
-- Changes: Added CREATE_EXTERNAL_TABLES_SET and CREATE_EXTERNAL_TABLES_BATCH wrappers with pArea and pRestoreGrants parameters
|
||||||
|
|
||||||
SET ECHO ON
|
SET ECHO ON
|
||||||
SET DEFINE OFF
|
SET DEFINE OFF
|
||||||
|
|
||||||
PROMPT Installing ODS.FILE_MANAGER_ODS Package Specification v2.2.0...
|
PROMPT Installing ODS.FILE_MANAGER_ODS Package Specification v2.4.0...
|
||||||
|
|
||||||
@@new_version/FILE_MANAGER_ODS.pkg
|
@@new_version/FILE_MANAGER_ODS.pkg
|
||||||
|
|
||||||
|
|||||||
@@ -1,17 +1,17 @@
|
|||||||
-- ===================================================================
|
-- ===================================================================
|
||||||
-- MARS-1057: Install ODS.FILE_MANAGER_ODS Package Body
|
-- MARS-1057: Install ODS.FILE_MANAGER_ODS Package Body
|
||||||
-- ===================================================================
|
-- ===================================================================
|
||||||
-- Purpose: Deploy FILE_MANAGER_ODS package body v2.2.0
|
-- Purpose: Deploy FILE_MANAGER_ODS package body v2.4.0
|
||||||
-- Author: Grzegorz Michalski
|
-- Author: Grzegorz Michalski
|
||||||
-- Date: 2025-11-27
|
-- Date: 2026-02-18
|
||||||
-- Package: ODS.FILE_MANAGER_ODS
|
-- Package: ODS.FILE_MANAGER_ODS
|
||||||
-- Version: 2.1.0 -> 2.2.0
|
-- Version: 2.1.0 -> 2.4.0
|
||||||
-- Changes: Added CREATE_EXTERNAL_TABLES_SET and CREATE_EXTERNAL_TABLES_BATCH wrapper implementations
|
-- Changes: Added CREATE_EXTERNAL_TABLES_SET and CREATE_EXTERNAL_TABLES_BATCH wrapper implementations with pArea and pRestoreGrants parameters
|
||||||
|
|
||||||
SET ECHO ON
|
SET ECHO ON
|
||||||
SET DEFINE OFF
|
SET DEFINE OFF
|
||||||
|
|
||||||
PROMPT Installing ODS.FILE_MANAGER_ODS Package Body v2.2.0...
|
PROMPT Installing ODS.FILE_MANAGER_ODS Package Body v2.4.0...
|
||||||
|
|
||||||
@@new_version/FILE_MANAGER_ODS.pkb
|
@@new_version/FILE_MANAGER_ODS.pkb
|
||||||
|
|
||||||
|
|||||||
@@ -1,19 +1,19 @@
|
|||||||
--=============================================================================================================================
|
--=============================================================================================================================
|
||||||
-- MARS-1057: Rollback FILE_MANAGER Package Specification to v3.3.0
|
-- MARS-1057: Rollback FILE_MANAGER Package Specification to v3.3.0
|
||||||
--=============================================================================================================================
|
--=============================================================================================================================
|
||||||
-- Purpose: Restore FILE_MANAGER package specification to version before MARS-1057 changes
|
-- Purpose: Restore FILE_MANAGER package specification to version before MARS-1057 changes (from v3.5.0 to v3.3.0)
|
||||||
-- Author: Grzegorz Michalski
|
-- Author: Grzegorz Michalski
|
||||||
-- Date: 2025-11-27
|
-- Date: 2026-02-18
|
||||||
-- Related: MARS-1057 Batch External Table Creation (ROLLBACK)
|
-- Related: MARS-1057 Batch External Table Creation + Area Filter (ROLLBACK)
|
||||||
--=============================================================================================================================
|
--=============================================================================================================================
|
||||||
|
|
||||||
SET SERVEROUTPUT ON
|
SET SERVEROUTPUT ON
|
||||||
|
|
||||||
PROMPT ========================================================================
|
PROMPT ========================================================================
|
||||||
PROMPT Rolling back FILE_MANAGER Package Specification to v3.3.0
|
PROMPT Rolling back FILE_MANAGER Package Specification from v3.5.0 to v3.3.0
|
||||||
PROMPT ========================================================================
|
PROMPT ========================================================================
|
||||||
|
|
||||||
@@current_version/FILE_MANAGER.pkg
|
@@rollback_version/FILE_MANAGER.pkg
|
||||||
|
|
||||||
-- Verify compilation status (check specific schema when installing as ADMIN)
|
-- Verify compilation status (check specific schema when installing as ADMIN)
|
||||||
SELECT object_name, object_type, status
|
SELECT object_name, object_type, status
|
||||||
|
|||||||
@@ -1,19 +1,19 @@
|
|||||||
--=============================================================================================================================
|
--=============================================================================================================================
|
||||||
-- MARS-1057: Rollback FILE_MANAGER Package Body to v3.3.0
|
-- MARS-1057: Rollback FILE_MANAGER Package Body to v3.3.0
|
||||||
--=============================================================================================================================
|
--=============================================================================================================================
|
||||||
-- Purpose: Restore FILE_MANAGER package body to version before MARS-1057 changes
|
-- Purpose: Restore FILE_MANAGER package body to version before MARS-1057 changes (from v3.5.0 to v3.3.0)
|
||||||
-- Author: Grzegorz Michalski
|
-- Author: Grzegorz Michalski
|
||||||
-- Date: 2025-11-27
|
-- Date: 2026-02-18
|
||||||
-- Related: MARS-1057 Batch External Table Creation (ROLLBACK)
|
-- Related: MARS-1057 Batch External Table Creation + Area Filter (ROLLBACK)
|
||||||
--=============================================================================================================================
|
--=============================================================================================================================
|
||||||
|
|
||||||
SET SERVEROUTPUT ON
|
SET SERVEROUTPUT ON
|
||||||
|
|
||||||
PROMPT ========================================================================
|
PROMPT ========================================================================
|
||||||
PROMPT Rolling back FILE_MANAGER Package Body to v3.3.0
|
PROMPT Rolling back FILE_MANAGER Package Body from v3.5.0 to v3.3.0
|
||||||
PROMPT ========================================================================
|
PROMPT ========================================================================
|
||||||
|
|
||||||
@@current_version/FILE_MANAGER.pkb
|
@@rollback_version/FILE_MANAGER.pkb
|
||||||
|
|
||||||
-- Verify compilation status (check specific schema when installing as ADMIN)
|
-- Verify compilation status (check specific schema when installing as ADMIN)
|
||||||
SELECT object_name, object_type, status
|
SELECT object_name, object_type, status
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user