Compare commits
21 Commits
866b886c70
...
develop
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
e9d4056451 | ||
|
|
60b218d211 | ||
|
|
819b6f7880 | ||
|
|
c68d5bfe2c | ||
|
|
c607bbe26e | ||
|
|
1569237306 | ||
|
|
472a724fe0 | ||
|
|
04d4f6ac02 | ||
|
|
ca5d8b320c | ||
|
|
2605896469 | ||
|
|
b588b0bb72 | ||
|
|
6060f93fde | ||
|
|
99aca3af40 | ||
|
|
1089184367 | ||
|
|
e538706896 | ||
|
|
ff034fcd68 | ||
|
|
b85172ae84 | ||
|
|
577c94f363 | ||
|
|
11723f6c88 | ||
|
|
b63be15f5d | ||
|
|
28972e7428 |
@@ -25,6 +25,7 @@ BEGIN
|
|||||||
pBucketArea => 'ARCHIVE',
|
pBucketArea => 'ARCHIVE',
|
||||||
pFolderName => 'ARCHIVE/LM/LM_ADHOC_ADJUSTMENTS_HEADER',
|
pFolderName => 'ARCHIVE/LM/LM_ADHOC_ADJUSTMENTS_HEADER',
|
||||||
pParallelDegree => 1,
|
pParallelDegree => 1,
|
||||||
|
pTemplateTableName => 'CT_ET_TEMPLATES.LM_ADHOC_ADJUSTMENTS_HEADER',
|
||||||
pJobClass => 'high'
|
pJobClass => 'high'
|
||||||
);
|
);
|
||||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_ADHOC_ADJ_HEADER exported');
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_ADHOC_ADJ_HEADER exported');
|
||||||
@@ -46,6 +47,7 @@ BEGIN
|
|||||||
pBucketArea => 'ARCHIVE',
|
pBucketArea => 'ARCHIVE',
|
||||||
pFolderName => 'ARCHIVE/LM/LM_ADHOC_ADJUSTMENTS_ITEM',
|
pFolderName => 'ARCHIVE/LM/LM_ADHOC_ADJUSTMENTS_ITEM',
|
||||||
pParallelDegree => 1,
|
pParallelDegree => 1,
|
||||||
|
pTemplateTableName => 'CT_ET_TEMPLATES.LM_ADHOC_ADJUSTMENTS_ITEM',
|
||||||
pJobClass => 'high'
|
pJobClass => 'high'
|
||||||
);
|
);
|
||||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_ADHOC_ADJ_ITEM exported');
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_ADHOC_ADJ_ITEM exported');
|
||||||
@@ -67,6 +69,7 @@ BEGIN
|
|||||||
pBucketArea => 'ARCHIVE',
|
pBucketArea => 'ARCHIVE',
|
||||||
pFolderName => 'ARCHIVE/LM/LM_ADHOC_ADJUSTMENTS_ITEM_HEADER',
|
pFolderName => 'ARCHIVE/LM/LM_ADHOC_ADJUSTMENTS_ITEM_HEADER',
|
||||||
pParallelDegree => 1,
|
pParallelDegree => 1,
|
||||||
|
pTemplateTableName => 'CT_ET_TEMPLATES.LM_ADHOC_ADJUSTMENTS_ITEM_HEADER',
|
||||||
pJobClass => 'high'
|
pJobClass => 'high'
|
||||||
);
|
);
|
||||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_ADHOC_ADJ_ITEM_HEADER exported');
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_ADHOC_ADJ_ITEM_HEADER exported');
|
||||||
|
|||||||
@@ -30,6 +30,7 @@ BEGIN
|
|||||||
pBucketArea => 'ARCHIVE',
|
pBucketArea => 'ARCHIVE',
|
||||||
pFolderName => 'ARCHIVE/LM/LM_BALANCESHEET_HEADER',
|
pFolderName => 'ARCHIVE/LM/LM_BALANCESHEET_HEADER',
|
||||||
pParallelDegree => 4,
|
pParallelDegree => 4,
|
||||||
|
pTemplateTableName => 'CT_ET_TEMPLATES.LM_BALANCESHEET_HEADER',
|
||||||
pJobClass => 'high'
|
pJobClass => 'high'
|
||||||
);
|
);
|
||||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_BALANCESHEET_HEADER exported');
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_BALANCESHEET_HEADER exported');
|
||||||
@@ -51,6 +52,7 @@ BEGIN
|
|||||||
pBucketArea => 'ARCHIVE',
|
pBucketArea => 'ARCHIVE',
|
||||||
pFolderName => 'ARCHIVE/LM/LM_BALANCESHEET_ITEM',
|
pFolderName => 'ARCHIVE/LM/LM_BALANCESHEET_ITEM',
|
||||||
pParallelDegree => 16,
|
pParallelDegree => 16,
|
||||||
|
pTemplateTableName => 'CT_ET_TEMPLATES.LM_BALANCESHEET_ITEM',
|
||||||
pJobClass => 'high'
|
pJobClass => 'high'
|
||||||
);
|
);
|
||||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_BALANCESHEET_ITEM exported');
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_BALANCESHEET_ITEM exported');
|
||||||
|
|||||||
@@ -25,6 +25,7 @@ BEGIN
|
|||||||
pBucketArea => 'ARCHIVE',
|
pBucketArea => 'ARCHIVE',
|
||||||
pFolderName => 'ARCHIVE/LM/LM_CSM_ADJUSTMENTS_HEADER',
|
pFolderName => 'ARCHIVE/LM/LM_CSM_ADJUSTMENTS_HEADER',
|
||||||
pParallelDegree => 1,
|
pParallelDegree => 1,
|
||||||
|
pTemplateTableName => 'CT_ET_TEMPLATES.LM_CSM_ADJUSTMENTS_HEADER',
|
||||||
pJobClass => 'high'
|
pJobClass => 'high'
|
||||||
);
|
);
|
||||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_CSM_ADJ_HEADER exported');
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_CSM_ADJ_HEADER exported');
|
||||||
@@ -46,6 +47,7 @@ BEGIN
|
|||||||
pBucketArea => 'ARCHIVE',
|
pBucketArea => 'ARCHIVE',
|
||||||
pFolderName => 'ARCHIVE/LM/LM_CSM_ADJUSTMENTS_ITEM',
|
pFolderName => 'ARCHIVE/LM/LM_CSM_ADJUSTMENTS_ITEM',
|
||||||
pParallelDegree => 2,
|
pParallelDegree => 2,
|
||||||
|
pTemplateTableName => 'CT_ET_TEMPLATES.LM_CSM_ADJUSTMENTS_ITEM',
|
||||||
pJobClass => 'high'
|
pJobClass => 'high'
|
||||||
);
|
);
|
||||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_CSM_ADJ_ITEM exported');
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_CSM_ADJ_ITEM exported');
|
||||||
@@ -67,6 +69,7 @@ BEGIN
|
|||||||
pBucketArea => 'ARCHIVE',
|
pBucketArea => 'ARCHIVE',
|
||||||
pFolderName => 'ARCHIVE/LM/LM_CSM_ADJUSTMENTS_ITEM_HEADER',
|
pFolderName => 'ARCHIVE/LM/LM_CSM_ADJUSTMENTS_ITEM_HEADER',
|
||||||
pParallelDegree => 2,
|
pParallelDegree => 2,
|
||||||
|
pTemplateTableName => 'CT_ET_TEMPLATES.LM_CSM_ADJUSTMENTS_ITEM_HEADER',
|
||||||
pJobClass => 'high'
|
pJobClass => 'high'
|
||||||
);
|
);
|
||||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_CSM_ADJ_ITEM_HEADER exported');
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_CSM_ADJ_ITEM_HEADER exported');
|
||||||
|
|||||||
@@ -30,6 +30,7 @@ BEGIN
|
|||||||
pBucketArea => 'ARCHIVE',
|
pBucketArea => 'ARCHIVE',
|
||||||
pFolderName => 'ARCHIVE/LM/LM_STANDING_FACILITIES',
|
pFolderName => 'ARCHIVE/LM/LM_STANDING_FACILITIES',
|
||||||
pParallelDegree => 8,
|
pParallelDegree => 8,
|
||||||
|
pTemplateTableName => 'CT_ET_TEMPLATES.LM_STANDING_FACILITIES',
|
||||||
pJobClass => 'high'
|
pJobClass => 'high'
|
||||||
);
|
);
|
||||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_STANDING_FACILITY exported');
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_STANDING_FACILITY exported');
|
||||||
@@ -51,6 +52,7 @@ BEGIN
|
|||||||
pBucketArea => 'ARCHIVE',
|
pBucketArea => 'ARCHIVE',
|
||||||
pFolderName => 'ARCHIVE/LM/LM_STANDING_FACILITIES_HEADER',
|
pFolderName => 'ARCHIVE/LM/LM_STANDING_FACILITIES_HEADER',
|
||||||
pParallelDegree => 2,
|
pParallelDegree => 2,
|
||||||
|
pTemplateTableName => 'CT_ET_TEMPLATES.LM_STANDING_FACILITIES_HEADER',
|
||||||
pJobClass => 'high'
|
pJobClass => 'high'
|
||||||
);
|
);
|
||||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_STANDING_FACILITY_HEADER exported');
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_STANDING_FACILITY_HEADER exported');
|
||||||
|
|||||||
@@ -26,6 +26,7 @@ BEGIN
|
|||||||
pBucketArea => 'ARCHIVE',
|
pBucketArea => 'ARCHIVE',
|
||||||
pFolderName => 'ARCHIVE/LM/LM_CURRENT_ACCOUNTS_HEADER',
|
pFolderName => 'ARCHIVE/LM/LM_CURRENT_ACCOUNTS_HEADER',
|
||||||
pParallelDegree => 2,
|
pParallelDegree => 2,
|
||||||
|
pTemplateTableName => 'CT_ET_TEMPLATES.LM_CURRENT_ACCOUNTS_HEADER',
|
||||||
pJobClass => 'high'
|
pJobClass => 'high'
|
||||||
);
|
);
|
||||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_MRR_IND_CURRENT_ACCOUNT_HEADER exported');
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_MRR_IND_CURRENT_ACCOUNT_HEADER exported');
|
||||||
@@ -47,6 +48,7 @@ BEGIN
|
|||||||
pBucketArea => 'ARCHIVE',
|
pBucketArea => 'ARCHIVE',
|
||||||
pFolderName => 'ARCHIVE/LM/LM_CURRENT_ACCOUNTS_ITEM',
|
pFolderName => 'ARCHIVE/LM/LM_CURRENT_ACCOUNTS_ITEM',
|
||||||
pParallelDegree => 16,
|
pParallelDegree => 16,
|
||||||
|
pTemplateTableName => 'CT_ET_TEMPLATES.LM_CURRENT_ACCOUNTS_ITEM',
|
||||||
pJobClass => 'high'
|
pJobClass => 'high'
|
||||||
);
|
);
|
||||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_MRR_IND_CURRENT_ACCOUNT_ITEM exported');
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_MRR_IND_CURRENT_ACCOUNT_ITEM exported');
|
||||||
|
|||||||
@@ -30,6 +30,7 @@ BEGIN
|
|||||||
pBucketArea => 'ARCHIVE',
|
pBucketArea => 'ARCHIVE',
|
||||||
pFolderName => 'ARCHIVE/LM/LM_FORECAST_HEADER',
|
pFolderName => 'ARCHIVE/LM/LM_FORECAST_HEADER',
|
||||||
pParallelDegree => 4,
|
pParallelDegree => 4,
|
||||||
|
pTemplateTableName => 'CT_ET_TEMPLATES.LM_FORECAST_HEADER',
|
||||||
pJobClass => 'high'
|
pJobClass => 'high'
|
||||||
);
|
);
|
||||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_FORECAST_HEADER exported');
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_FORECAST_HEADER exported');
|
||||||
@@ -51,6 +52,7 @@ BEGIN
|
|||||||
pBucketArea => 'ARCHIVE',
|
pBucketArea => 'ARCHIVE',
|
||||||
pFolderName => 'ARCHIVE/LM/LM_FORECAST_ITEM',
|
pFolderName => 'ARCHIVE/LM/LM_FORECAST_ITEM',
|
||||||
pParallelDegree => 16,
|
pParallelDegree => 16,
|
||||||
|
pTemplateTableName => 'CT_ET_TEMPLATES.LM_FORECAST_ITEM',
|
||||||
pJobClass => 'high'
|
pJobClass => 'high'
|
||||||
);
|
);
|
||||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_FORECAST_ITEM exported');
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_FORECAST_ITEM exported');
|
||||||
|
|||||||
@@ -25,6 +25,7 @@ BEGIN
|
|||||||
pBucketArea => 'ARCHIVE',
|
pBucketArea => 'ARCHIVE',
|
||||||
pFolderName => 'ARCHIVE/LM/LM_QRE_ADJUSTMENTS_HEADER',
|
pFolderName => 'ARCHIVE/LM/LM_QRE_ADJUSTMENTS_HEADER',
|
||||||
pParallelDegree => 1,
|
pParallelDegree => 1,
|
||||||
|
pTemplateTableName => 'CT_ET_TEMPLATES.LM_QRE_ADJUSTMENTS_HEADER',
|
||||||
pJobClass => 'high'
|
pJobClass => 'high'
|
||||||
);
|
);
|
||||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_QR_ADJ_HEADER exported');
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_QR_ADJ_HEADER exported');
|
||||||
@@ -46,6 +47,7 @@ BEGIN
|
|||||||
pBucketArea => 'ARCHIVE',
|
pBucketArea => 'ARCHIVE',
|
||||||
pFolderName => 'ARCHIVE/LM/LM_QRE_ADJUSTMENTS_ITEM',
|
pFolderName => 'ARCHIVE/LM/LM_QRE_ADJUSTMENTS_ITEM',
|
||||||
pParallelDegree => 4,
|
pParallelDegree => 4,
|
||||||
|
pTemplateTableName => 'CT_ET_TEMPLATES.LM_QRE_ADJUSTMENTS_ITEM',
|
||||||
pJobClass => 'high'
|
pJobClass => 'high'
|
||||||
);
|
);
|
||||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_QR_ADJ_ITEM exported');
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_QR_ADJ_ITEM exported');
|
||||||
@@ -66,8 +68,7 @@ BEGIN
|
|||||||
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
||||||
pBucketArea => 'ARCHIVE',
|
pBucketArea => 'ARCHIVE',
|
||||||
pFolderName => 'ARCHIVE/LM/LM_QRE_ADJUSTMENTS_ITEM_HEADER',
|
pFolderName => 'ARCHIVE/LM/LM_QRE_ADJUSTMENTS_ITEM_HEADER',
|
||||||
pParallelDegree => 2,
|
pParallelDegree => 2, pTemplateTableName => 'CT_ET_TEMPLATES.LM_QRE_ADJUSTMENTS_ITEM_HEADER', pJobClass => 'high'
|
||||||
pJobClass => 'high'
|
|
||||||
);
|
);
|
||||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_QR_ADJ_ITEM_HEADER exported');
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_QR_ADJ_ITEM_HEADER exported');
|
||||||
EXCEPTION
|
EXCEPTION
|
||||||
|
|||||||
@@ -25,6 +25,7 @@ BEGIN
|
|||||||
pBucketArea => 'ARCHIVE',
|
pBucketArea => 'ARCHIVE',
|
||||||
pFolderName => 'ARCHIVE/LM/LM_TTS_HEADER',
|
pFolderName => 'ARCHIVE/LM/LM_TTS_HEADER',
|
||||||
pParallelDegree => 1,
|
pParallelDegree => 1,
|
||||||
|
pTemplateTableName => 'CT_ET_TEMPLATES.LM_TTS_HEADER',
|
||||||
pJobClass => 'high'
|
pJobClass => 'high'
|
||||||
);
|
);
|
||||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_TTS_HEADER exported');
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_TTS_HEADER exported');
|
||||||
@@ -46,6 +47,7 @@ BEGIN
|
|||||||
pBucketArea => 'ARCHIVE',
|
pBucketArea => 'ARCHIVE',
|
||||||
pFolderName => 'ARCHIVE/LM/LM_TTS_ITEM',
|
pFolderName => 'ARCHIVE/LM/LM_TTS_ITEM',
|
||||||
pParallelDegree => 1,
|
pParallelDegree => 1,
|
||||||
|
pTemplateTableName => 'CT_ET_TEMPLATES.LM_TTS_ITEM',
|
||||||
pJobClass => 'high'
|
pJobClass => 'high'
|
||||||
);
|
);
|
||||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_TTS_ITEM exported');
|
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_TTS_ITEM exported');
|
||||||
|
|||||||
@@ -11,8 +11,8 @@ PROMPT ========================================
|
|||||||
ALTER TABLE CT_MRDS.A_SOURCE_FILE_CONFIG ADD (
|
ALTER TABLE CT_MRDS.A_SOURCE_FILE_CONFIG ADD (
|
||||||
ARCHIVAL_STRATEGY VARCHAR2(30) DEFAULT 'THRESHOLD_BASED' NOT NULL,
|
ARCHIVAL_STRATEGY VARCHAR2(30) DEFAULT 'THRESHOLD_BASED' NOT NULL,
|
||||||
MINIMUM_AGE_MONTHS NUMBER(3) DEFAULT NULL,
|
MINIMUM_AGE_MONTHS NUMBER(3) DEFAULT NULL,
|
||||||
ARCHIVE_ENABLED CHAR(1) DEFAULT 'N' NOT NULL,
|
IS_ARCHIVE_ENABLED CHAR(1) DEFAULT 'N' NOT NULL,
|
||||||
KEEP_IN_TRASH CHAR(1) DEFAULT 'Y' NOT NULL
|
IS_KEEP_IN_TRASH CHAR(1) DEFAULT 'Y' NOT NULL
|
||||||
);
|
);
|
||||||
|
|
||||||
-- Add check constraints
|
-- Add check constraints
|
||||||
@@ -22,10 +22,10 @@ ALTER TABLE CT_MRDS.A_SOURCE_FILE_CONFIG ADD CONSTRAINT
|
|||||||
);
|
);
|
||||||
|
|
||||||
ALTER TABLE CT_MRDS.A_SOURCE_FILE_CONFIG ADD CONSTRAINT
|
ALTER TABLE CT_MRDS.A_SOURCE_FILE_CONFIG ADD CONSTRAINT
|
||||||
CHK_ARCHIVE_ENABLED CHECK (ARCHIVE_ENABLED IN ('Y', 'N'));
|
CHK_IS_ARCHIVE_ENABLED CHECK (IS_ARCHIVE_ENABLED IN ('Y', 'N'));
|
||||||
|
|
||||||
ALTER TABLE CT_MRDS.A_SOURCE_FILE_CONFIG ADD CONSTRAINT
|
ALTER TABLE CT_MRDS.A_SOURCE_FILE_CONFIG ADD CONSTRAINT
|
||||||
CHK_KEEP_IN_TRASH CHECK (KEEP_IN_TRASH IN ('Y', 'N'));
|
CHK_IS_KEEP_IN_TRASH CHECK (IS_KEEP_IN_TRASH IN ('Y', 'N'));
|
||||||
|
|
||||||
-- Add comments
|
-- Add comments
|
||||||
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ARCHIVAL_STRATEGY IS
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ARCHIVAL_STRATEGY IS
|
||||||
@@ -34,10 +34,10 @@ COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ARCHIVAL_STRATEGY IS
|
|||||||
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.MINIMUM_AGE_MONTHS IS
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.MINIMUM_AGE_MONTHS IS
|
||||||
'Minimum age in months for archival (used with MINIMUM_AGE_MONTHS or HYBRID strategies)';
|
'Minimum age in months for archival (used with MINIMUM_AGE_MONTHS or HYBRID strategies)';
|
||||||
|
|
||||||
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ARCHIVE_ENABLED IS
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.IS_ARCHIVE_ENABLED IS
|
||||||
'Y=Enable archiving, N=Skip archiving. Controls if table participates in archival process. Added in MARS-828 v3.3.0';
|
'Y=Enable archiving, N=Skip archiving. Controls if table participates in archival process. Added in MARS-828 v3.3.0';
|
||||||
|
|
||||||
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.KEEP_IN_TRASH IS
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.IS_KEEP_IN_TRASH IS
|
||||||
'Y=Keep files in TRASH after archiving, N=Delete immediately. Controls TRASH retention policy. Added in MARS-828 v3.3.0';
|
'Y=Keep files in TRASH after archiving, N=Delete immediately. Controls TRASH retention policy. Added in MARS-828 v3.3.0';
|
||||||
|
|
||||||
-- Verify columns added
|
-- Verify columns added
|
||||||
@@ -50,7 +50,7 @@ SELECT
|
|||||||
FROM all_tab_columns
|
FROM all_tab_columns
|
||||||
WHERE owner = 'CT_MRDS'
|
WHERE owner = 'CT_MRDS'
|
||||||
AND table_name = 'A_SOURCE_FILE_CONFIG'
|
AND table_name = 'A_SOURCE_FILE_CONFIG'
|
||||||
AND column_name IN ('ARCHIVAL_STRATEGY', 'MINIMUM_AGE_MONTHS', 'ARCHIVE_ENABLED', 'KEEP_IN_TRASH')
|
AND column_name IN ('ARCHIVAL_STRATEGY', 'MINIMUM_AGE_MONTHS', 'IS_ARCHIVE_ENABLED', 'IS_KEEP_IN_TRASH')
|
||||||
ORDER BY column_id;
|
ORDER BY column_id;
|
||||||
|
|
||||||
PROMPT ========================================
|
PROMPT ========================================
|
||||||
|
|||||||
@@ -0,0 +1,49 @@
|
|||||||
|
-- MARS-828: Rename threshold columns for consistency
|
||||||
|
-- Author: Grzegorz Michalski
|
||||||
|
-- Date: 2026-01-28
|
||||||
|
-- Description: Renames threshold columns to use consistent ARCHIVE_THRESHOLD_* prefix pattern
|
||||||
|
-- Old naming was inconsistent (DAYS_FOR vs FILES_COUNT_OVER)
|
||||||
|
-- New naming groups all threshold columns with common prefix
|
||||||
|
|
||||||
|
PROMPT ========================================
|
||||||
|
PROMPT MARS-828: Renaming threshold columns for consistency
|
||||||
|
PROMPT ========================================
|
||||||
|
|
||||||
|
-- Rename threshold columns to consistent ARCHIVE_THRESHOLD_* pattern
|
||||||
|
ALTER TABLE CT_MRDS.A_SOURCE_FILE_CONFIG
|
||||||
|
RENAME COLUMN DAYS_FOR_ARCHIVE_THRESHOLD TO ARCHIVE_THRESHOLD_DAYS;
|
||||||
|
|
||||||
|
ALTER TABLE CT_MRDS.A_SOURCE_FILE_CONFIG
|
||||||
|
RENAME COLUMN FILES_COUNT_OVER_ARCHIVE_THRESHOLD TO ARCHIVE_THRESHOLD_FILES_COUNT;
|
||||||
|
|
||||||
|
ALTER TABLE CT_MRDS.A_SOURCE_FILE_CONFIG
|
||||||
|
RENAME COLUMN BYTES_SUM_OVER_ARCHIVE_THRESHOLD TO ARCHIVE_THRESHOLD_BYTES_SUM;
|
||||||
|
|
||||||
|
ALTER TABLE CT_MRDS.A_SOURCE_FILE_CONFIG
|
||||||
|
RENAME COLUMN ROWS_COUNT_OVER_ARCHIVE_THRESHOLD TO ARCHIVE_THRESHOLD_ROWS_COUNT;
|
||||||
|
|
||||||
|
-- Verify column renames
|
||||||
|
PROMPT ========================================
|
||||||
|
PROMPT Verifying threshold column renames...
|
||||||
|
PROMPT ========================================
|
||||||
|
|
||||||
|
SELECT
|
||||||
|
column_name,
|
||||||
|
data_type,
|
||||||
|
data_length
|
||||||
|
FROM all_tab_columns
|
||||||
|
WHERE owner = 'CT_MRDS'
|
||||||
|
AND table_name = 'A_SOURCE_FILE_CONFIG'
|
||||||
|
AND column_name LIKE 'ARCHIVE_THRESHOLD%'
|
||||||
|
ORDER BY column_id;
|
||||||
|
|
||||||
|
PROMPT ========================================
|
||||||
|
PROMPT Expected columns:
|
||||||
|
PROMPT ARCHIVE_THRESHOLD_DAYS
|
||||||
|
PROMPT ARCHIVE_THRESHOLD_FILES_COUNT
|
||||||
|
PROMPT ARCHIVE_THRESHOLD_BYTES_SUM
|
||||||
|
PROMPT ARCHIVE_THRESHOLD_ROWS_COUNT
|
||||||
|
PROMPT ========================================
|
||||||
|
|
||||||
|
PROMPT Threshold columns renamed successfully
|
||||||
|
PROMPT ========================================
|
||||||
@@ -0,0 +1,160 @@
|
|||||||
|
-- =====================================================================
|
||||||
|
-- Script: 01b_MARS_828_add_column_comments.sql
|
||||||
|
-- MARS Issue: MARS-828
|
||||||
|
-- Author: Grzegorz Michalski
|
||||||
|
-- Date: 2026-02-20
|
||||||
|
-- Purpose: Add comprehensive column comments for A_SOURCE_FILE_CONFIG and A_SOURCE_FILE_RECEIVED tables
|
||||||
|
-- Description: Documents all columns to improve database maintainability and user understanding
|
||||||
|
-- =====================================================================
|
||||||
|
|
||||||
|
PROMPT ========================================
|
||||||
|
PROMPT MARS-828: Adding comprehensive column comments
|
||||||
|
PROMPT ========================================
|
||||||
|
|
||||||
|
-- =====================================================================
|
||||||
|
-- A_SOURCE_FILE_CONFIG Column Comments
|
||||||
|
-- =====================================================================
|
||||||
|
|
||||||
|
PROMPT Adding column comments for A_SOURCE_FILE_CONFIG...
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.A_SOURCE_FILE_CONFIG_KEY IS
|
||||||
|
'Primary key - unique identifier for source file configuration record';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.A_SOURCE_KEY IS
|
||||||
|
'Foreign key to A_SOURCE table - identifies the source system (e.g., LM, C2D, CSDB)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.SOURCE_FILE_TYPE IS
|
||||||
|
'Type of file configuration: INPUT (data files), CONTAINER (xml files), or LOAD_CONFIG (configuration files)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.SOURCE_FILE_ID IS
|
||||||
|
'Unique identifier for the source file within the source system (e.g., UC_DISSEM, STANDING_FACILITIES)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.SOURCE_FILE_DESC IS
|
||||||
|
'Human-readable description of the source file and its purpose';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.SOURCE_FILE_NAME_PATTERN IS
|
||||||
|
'Filename pattern for matching incoming files (supports wildcards, e.g., UC_NMA_DISSEM-*.csv)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.TABLE_ID IS
|
||||||
|
'Identifier for the target table where data will be loaded (without schema prefix)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.TEMPLATE_TABLE_NAME IS
|
||||||
|
'Fully qualified name of template table in CT_ET_TEMPLATES schema used for external table creation';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.CONTAINER_FILE_KEY IS
|
||||||
|
'Foreign key to parent container configuration when this file is part of an xml (NULL for standalone files)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ARCHIVE_THRESHOLD_DAYS IS
|
||||||
|
'Threshold for THRESHOLD_BASED strategy: archive data older than N days';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ARCHIVE_THRESHOLD_FILES_COUNT IS
|
||||||
|
'Trigger archival when file count exceeds this threshold (used in THRESHOLD_BASED and HYBRID strategies)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ARCHIVE_THRESHOLD_BYTES_SUM IS
|
||||||
|
'Trigger archival when total size in bytes exceeds this threshold (used in THRESHOLD_BASED and HYBRID strategies)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ARCHIVE_THRESHOLD_ROWS_COUNT IS
|
||||||
|
'Trigger archival when total row count exceeds this threshold (used in THRESHOLD_BASED and HYBRID strategies)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ODS_SCHEMA_NAME IS
|
||||||
|
'Schema name where ODS external tables are created (typically ODS)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.HOURS_TO_EXPIRE_STATISTICS IS
|
||||||
|
'Number of hours before table statistics expire and need to be recalculated';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ARCHIVAL_STRATEGY IS
|
||||||
|
'Archival strategy: THRESHOLD_BASED (days-based), MINIMUM_AGE_MONTHS (0=current month, N=retain N months), HYBRID (combination)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.MINIMUM_AGE_MONTHS IS
|
||||||
|
'Minimum age in months before archival (required for MINIMUM_AGE_MONTHS and HYBRID strategies, 0=current month only)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ENCODING IS
|
||||||
|
'Oracle character set name for CSV files (e.g., UTF8, WE8MSWIN1252, EE8ISO8859P2)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.IS_ARCHIVE_ENABLED IS
|
||||||
|
'Y=Enable archiving, N=Skip archiving. Controls if table participates in archival process';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.IS_KEEP_IN_TRASH IS
|
||||||
|
'Y=Keep files in TRASH after archiving, N=Delete immediately. Controls TRASH retention policy';
|
||||||
|
|
||||||
|
-- =====================================================================
|
||||||
|
-- A_SOURCE_FILE_RECEIVED Column Comments
|
||||||
|
-- =====================================================================
|
||||||
|
|
||||||
|
PROMPT Adding column comments for A_SOURCE_FILE_RECEIVED...
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY IS
|
||||||
|
'Primary key - unique identifier for received file record';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_CONFIG_KEY IS
|
||||||
|
'Foreign key to A_SOURCE_FILE_CONFIG - links file to its configuration and processing rules';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.SOURCE_FILE_NAME IS
|
||||||
|
'Full object name/path of the received file in OCI Object Storage (includes INBOX prefix)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.CHECKSUM IS
|
||||||
|
'MD5 checksum of file content for integrity verification and duplicate detection';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.CREATED IS
|
||||||
|
'Timestamp with timezone when file was created/uploaded to Object Storage (from DBMS_CLOUD.LIST_OBJECTS)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.BYTES IS
|
||||||
|
'File size in bytes';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.RECEPTION_DATE IS
|
||||||
|
'Date when file was registered in the system (extracted from CREATED timestamp)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.PROCESSING_STATUS IS
|
||||||
|
'Current processing status: RECEIVED → VALIDATED → READY_FOR_INGESTION → INGESTED → ARCHIVED_AND_TRASHED → ARCHIVED_AND_PURGED';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.EXTERNAL_TABLE_NAME IS
|
||||||
|
'Name of temporary external table created for file validation (dropped after validation)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.PARTITION_YEAR IS
|
||||||
|
'Year partition value (YYYY format) when file was archived to ARCHIVE bucket with Hive-style partitioning';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.PARTITION_MONTH IS
|
||||||
|
'Month partition value (MM format) when file was archived to ARCHIVE bucket with Hive-style partitioning';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.ARCH_PATH IS
|
||||||
|
'Archive directory prefix in ARCHIVE bucket containing archived Parquet files (supports multiple files from parallel DBMS_CLOUD.EXPORT_DATA)';
|
||||||
|
|
||||||
|
-- =====================================================================
|
||||||
|
-- Verification
|
||||||
|
-- =====================================================================
|
||||||
|
|
||||||
|
PROMPT
|
||||||
|
PROMPT Verifying column comments...
|
||||||
|
PROMPT
|
||||||
|
|
||||||
|
SELECT
|
||||||
|
table_name,
|
||||||
|
COUNT(*) as total_columns,
|
||||||
|
COUNT(comments) as documented_columns,
|
||||||
|
COUNT(*) - COUNT(comments) as undocumented_columns
|
||||||
|
FROM all_col_comments
|
||||||
|
WHERE owner = 'CT_MRDS'
|
||||||
|
AND table_name IN ('A_SOURCE_FILE_CONFIG', 'A_SOURCE_FILE_RECEIVED')
|
||||||
|
GROUP BY table_name
|
||||||
|
ORDER BY table_name;
|
||||||
|
|
||||||
|
PROMPT
|
||||||
|
PROMPT Detailed column documentation status:
|
||||||
|
PROMPT
|
||||||
|
|
||||||
|
SELECT
|
||||||
|
table_name,
|
||||||
|
column_name,
|
||||||
|
CASE WHEN comments IS NULL THEN 'MISSING' ELSE 'OK' END as comment_status
|
||||||
|
FROM all_col_comments
|
||||||
|
WHERE owner = 'CT_MRDS'
|
||||||
|
AND table_name IN ('A_SOURCE_FILE_CONFIG', 'A_SOURCE_FILE_RECEIVED')
|
||||||
|
ORDER BY table_name, column_name;
|
||||||
|
|
||||||
|
PROMPT
|
||||||
|
PROMPT ========================================
|
||||||
|
PROMPT Column comments added successfully
|
||||||
|
PROMPT ========================================
|
||||||
|
PROMPT A_SOURCE_FILE_CONFIG: All 20 columns documented
|
||||||
|
PROMPT A_SOURCE_FILE_RECEIVED: All 12 columns documented
|
||||||
|
PROMPT ========================================
|
||||||
@@ -59,9 +59,23 @@ WHERE owner = 'CT_MRDS'
|
|||||||
AND object_type IN ('PACKAGE', 'PACKAGE BODY')
|
AND object_type IN ('PACKAGE', 'PACKAGE BODY')
|
||||||
ORDER BY object_type;
|
ORDER BY object_type;
|
||||||
|
|
||||||
-- 5. Check for compilation errors
|
-- 5. Check FILE_MANAGER package compilation status
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT 5. Checking for compilation errors...
|
PROMPT 5. Checking FILE_MANAGER package status...
|
||||||
|
SELECT
|
||||||
|
object_name,
|
||||||
|
object_type,
|
||||||
|
status,
|
||||||
|
TO_CHAR(last_ddl_time, 'YYYY-MM-DD HH24:MI:SS') as last_ddl_time
|
||||||
|
FROM all_objects
|
||||||
|
WHERE owner = 'CT_MRDS'
|
||||||
|
AND object_name = 'FILE_MANAGER'
|
||||||
|
AND object_type IN ('PACKAGE', 'PACKAGE BODY')
|
||||||
|
ORDER BY object_type;
|
||||||
|
|
||||||
|
-- 6. Check for compilation errors
|
||||||
|
PROMPT
|
||||||
|
PROMPT 6. Checking for compilation errors (FILE_ARCHIVER)...
|
||||||
SELECT
|
SELECT
|
||||||
name,
|
name,
|
||||||
type,
|
type,
|
||||||
@@ -73,14 +87,31 @@ WHERE owner = 'CT_MRDS'
|
|||||||
AND name = 'FILE_ARCHIVER'
|
AND name = 'FILE_ARCHIVER'
|
||||||
ORDER BY type, sequence;
|
ORDER BY type, sequence;
|
||||||
|
|
||||||
-- 6. Verify package version
|
-- 7. Check for compilation errors (FILE_MANAGER)
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT 6. Verifying FILE_ARCHIVER version...
|
PROMPT 7. Checking for compilation errors (FILE_MANAGER)...
|
||||||
SELECT CT_MRDS.FILE_ARCHIVER.GET_VERSION() as package_version FROM DUAL;
|
SELECT
|
||||||
|
name,
|
||||||
|
type,
|
||||||
|
line,
|
||||||
|
position,
|
||||||
|
text
|
||||||
|
FROM all_errors
|
||||||
|
WHERE owner = 'CT_MRDS'
|
||||||
|
AND name = 'FILE_MANAGER'
|
||||||
|
ORDER BY type, sequence;
|
||||||
|
|
||||||
-- 7. Test trigger validation
|
-- 8. Verify package versions
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT 7. Testing trigger validation (should fail)...
|
PROMPT 8. Verifying package versions...
|
||||||
|
PROMPT FILE_ARCHIVER version:
|
||||||
|
SELECT CT_MRDS.FILE_ARCHIVER.GET_VERSION() as package_version FROM DUAL;
|
||||||
|
PROMPT FILE_MANAGER version:
|
||||||
|
SELECT CT_MRDS.FILE_MANAGER.GET_VERSION() as package_version FROM DUAL;
|
||||||
|
|
||||||
|
-- 9. Test trigger validation
|
||||||
|
PROMPT
|
||||||
|
PROMPT 9. Testing trigger validation (should fail)...
|
||||||
WHENEVER SQLERROR CONTINUE
|
WHENEVER SQLERROR CONTINUE
|
||||||
SET SERVEROUTPUT ON
|
SET SERVEROUTPUT ON
|
||||||
DECLARE
|
DECLARE
|
||||||
|
|||||||
@@ -13,7 +13,7 @@
|
|||||||
--
|
--
|
||||||
-- Configuration by group:
|
-- Configuration by group:
|
||||||
-- - 19 LM tables: MINIMUM_AGE_MONTHS=0 (current month only), 10 files OR 100K rows OR 1GB, 24h stats
|
-- - 19 LM tables: MINIMUM_AGE_MONTHS=0 (current month only), 10 files OR 100K rows OR 1GB, 24h stats
|
||||||
-- - 2 CSDB DEBT: MINIMUM_AGE_MONTHS=6, 5 files OR 50K rows OR 512MB, 48h stats
|
-- - 2 CSDB DEBT: MINIMUM_AGE_MONTHS=0 (current month only), 5 files OR 50K rows OR 512MB, 48h stats
|
||||||
-- - 4 CSDB ratings: MINIMUM_AGE_MONTHS=0 (current month only), 10 files OR 20K rows OR 256MB, 72h stats
|
-- - 4 CSDB ratings: MINIMUM_AGE_MONTHS=0 (current month only), 10 files OR 20K rows OR 256MB, 72h stats
|
||||||
--
|
--
|
||||||
-- Dependencies:
|
-- Dependencies:
|
||||||
@@ -33,7 +33,7 @@ PROMPT - Triggers: 10 files OR 100,000 rows OR 1 GB
|
|||||||
PROMPT - Stats Expiration: 24 hours
|
PROMPT - Stats Expiration: 24 hours
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT CSDB DEBT Tables (2):
|
PROMPT CSDB DEBT Tables (2):
|
||||||
PROMPT - Strategy: MINIMUM_AGE_MONTHS = 6
|
PROMPT - Strategy: MINIMUM_AGE_MONTHS = 0 (current month only)
|
||||||
PROMPT - Triggers: 5 files OR 50,000 rows OR 512 MB
|
PROMPT - Triggers: 5 files OR 50,000 rows OR 512 MB
|
||||||
PROMPT - Stats Expiration: 48 hours
|
PROMPT - Stats Expiration: 48 hours
|
||||||
PROMPT
|
PROMPT
|
||||||
@@ -57,12 +57,12 @@ UPDATE CT_MRDS.A_SOURCE_FILE_CONFIG
|
|||||||
SET ARCHIVAL_STRATEGY = 'MINIMUM_AGE_MONTHS',
|
SET ARCHIVAL_STRATEGY = 'MINIMUM_AGE_MONTHS',
|
||||||
MINIMUM_AGE_MONTHS = 0, -- 0 = current month only
|
MINIMUM_AGE_MONTHS = 0, -- 0 = current month only
|
||||||
ODS_SCHEMA_NAME = 'ODS',
|
ODS_SCHEMA_NAME = 'ODS',
|
||||||
FILES_COUNT_OVER_ARCHIVE_THRESHOLD = 10,
|
ARCHIVE_THRESHOLD_FILES_COUNT = 10,
|
||||||
ROWS_COUNT_OVER_ARCHIVE_THRESHOLD = 100000,
|
ARCHIVE_THRESHOLD_ROWS_COUNT = 100000,
|
||||||
BYTES_SUM_OVER_ARCHIVE_THRESHOLD = 1073741824, -- 1 GB
|
ARCHIVE_THRESHOLD_BYTES_SUM = 1073741824, -- 1 GB
|
||||||
HOURS_TO_EXPIRE_STATISTICS = 24,
|
HOURS_TO_EXPIRE_STATISTICS = 24,
|
||||||
ARCHIVE_ENABLED = 'Y', -- Enable archival for all LM tables
|
IS_ARCHIVE_ENABLED = 'Y', -- Enable archival for all LM tables
|
||||||
KEEP_IN_TRASH = 'N' -- Delete files immediately after archival (no TRASH retention)
|
IS_KEEP_IN_TRASH = 'N' -- Delete files immediately after archival (no TRASH retention)
|
||||||
WHERE SOURCE_FILE_TYPE = 'INPUT'
|
WHERE SOURCE_FILE_TYPE = 'INPUT'
|
||||||
AND A_SOURCE_KEY = 'LM'
|
AND A_SOURCE_KEY = 'LM'
|
||||||
AND TABLE_ID IN (
|
AND TABLE_ID IN (
|
||||||
@@ -92,23 +92,23 @@ PROMPT LM tables configuration completed
|
|||||||
PROMPT
|
PROMPT
|
||||||
|
|
||||||
PROMPT =====================================================================
|
PROMPT =====================================================================
|
||||||
PROMPT SECTION 2: CSDB DEBT Tables (MINIMUM_AGE_MONTHS = 6)
|
PROMPT SECTION 2: CSDB DEBT Tables (MINIMUM_AGE_MONTHS = 0)
|
||||||
PROMPT =====================================================================
|
PROMPT =====================================================================
|
||||||
PROMPT Thresholds: 5 files OR 50K rows OR 512MB
|
PROMPT Thresholds: 5 files OR 50K rows OR 512MB
|
||||||
PROMPT Stats expire: 48 hours
|
PROMPT Stats expire: 48 hours
|
||||||
PROMPT =====================================================================
|
PROMPT =====================================================================
|
||||||
|
|
||||||
-- Update CSDB DEBT tables (6-month retention)
|
-- Update CSDB DEBT tables (current month only)
|
||||||
UPDATE CT_MRDS.A_SOURCE_FILE_CONFIG
|
UPDATE CT_MRDS.A_SOURCE_FILE_CONFIG
|
||||||
SET ARCHIVAL_STRATEGY = 'MINIMUM_AGE_MONTHS',
|
SET ARCHIVAL_STRATEGY = 'MINIMUM_AGE_MONTHS',
|
||||||
MINIMUM_AGE_MONTHS = 6,
|
MINIMUM_AGE_MONTHS = 0,
|
||||||
ODS_SCHEMA_NAME = 'ODS',
|
ODS_SCHEMA_NAME = 'ODS',
|
||||||
FILES_COUNT_OVER_ARCHIVE_THRESHOLD = 5,
|
ARCHIVE_THRESHOLD_FILES_COUNT = 5,
|
||||||
ROWS_COUNT_OVER_ARCHIVE_THRESHOLD = 50000,
|
ARCHIVE_THRESHOLD_ROWS_COUNT = 50000,
|
||||||
BYTES_SUM_OVER_ARCHIVE_THRESHOLD = 536870912, -- 512 MB
|
ARCHIVE_THRESHOLD_BYTES_SUM = 536870912, -- 512 MB
|
||||||
HOURS_TO_EXPIRE_STATISTICS = 48,
|
HOURS_TO_EXPIRE_STATISTICS = 48,
|
||||||
ARCHIVE_ENABLED = 'Y', -- Enable archival for CSDB DEBT tables
|
IS_ARCHIVE_ENABLED = 'Y', -- Enable archival for CSDB DEBT tables
|
||||||
KEEP_IN_TRASH = 'N' -- Delete files immediately after archival (no TRASH retention)
|
IS_KEEP_IN_TRASH = 'N' -- Delete files immediately after archival (no TRASH retention)
|
||||||
WHERE SOURCE_FILE_TYPE = 'INPUT'
|
WHERE SOURCE_FILE_TYPE = 'INPUT'
|
||||||
AND A_SOURCE_KEY = 'CSDB'
|
AND A_SOURCE_KEY = 'CSDB'
|
||||||
AND TABLE_ID IN ('CSDB_DEBT', 'CSDB_DEBT_DAILY');
|
AND TABLE_ID IN ('CSDB_DEBT', 'CSDB_DEBT_DAILY');
|
||||||
@@ -129,12 +129,12 @@ UPDATE CT_MRDS.A_SOURCE_FILE_CONFIG
|
|||||||
SET ARCHIVAL_STRATEGY = 'MINIMUM_AGE_MONTHS',
|
SET ARCHIVAL_STRATEGY = 'MINIMUM_AGE_MONTHS',
|
||||||
MINIMUM_AGE_MONTHS = 0, -- 0 = current month only
|
MINIMUM_AGE_MONTHS = 0, -- 0 = current month only
|
||||||
ODS_SCHEMA_NAME = 'ODS',
|
ODS_SCHEMA_NAME = 'ODS',
|
||||||
FILES_COUNT_OVER_ARCHIVE_THRESHOLD = 10,
|
ARCHIVE_THRESHOLD_FILES_COUNT = 10,
|
||||||
ROWS_COUNT_OVER_ARCHIVE_THRESHOLD = 20000,
|
ARCHIVE_THRESHOLD_ROWS_COUNT = 20000,
|
||||||
BYTES_SUM_OVER_ARCHIVE_THRESHOLD = 268435456, -- 256 MB
|
ARCHIVE_THRESHOLD_BYTES_SUM = 268435456, -- 256 MB
|
||||||
HOURS_TO_EXPIRE_STATISTICS = 72,
|
HOURS_TO_EXPIRE_STATISTICS = 72,
|
||||||
ARCHIVE_ENABLED = 'Y', -- Enable archival for CSDB rating/description tables
|
IS_ARCHIVE_ENABLED = 'Y', -- Enable archival for CSDB rating/description tables
|
||||||
KEEP_IN_TRASH = 'N' -- Delete files immediately after archival (no TRASH retention)
|
IS_KEEP_IN_TRASH = 'N' -- Delete files immediately after archival (no TRASH retention)
|
||||||
WHERE SOURCE_FILE_TYPE = 'INPUT'
|
WHERE SOURCE_FILE_TYPE = 'INPUT'
|
||||||
AND A_SOURCE_KEY = 'CSDB'
|
AND A_SOURCE_KEY = 'CSDB'
|
||||||
AND TABLE_ID IN (
|
AND TABLE_ID IN (
|
||||||
@@ -170,21 +170,21 @@ SELECT
|
|||||||
TABLE_ID,
|
TABLE_ID,
|
||||||
ARCHIVAL_STRATEGY,
|
ARCHIVAL_STRATEGY,
|
||||||
MINIMUM_AGE_MONTHS,
|
MINIMUM_AGE_MONTHS,
|
||||||
FILES_COUNT_OVER_ARCHIVE_THRESHOLD AS FILE_THR,
|
ARCHIVE_THRESHOLD_FILES_COUNT AS FILE_THR,
|
||||||
ROWS_COUNT_OVER_ARCHIVE_THRESHOLD AS ROW_THR,
|
ARCHIVE_THRESHOLD_ROWS_COUNT AS ROW_THR,
|
||||||
BYTES_SUM_OVER_ARCHIVE_THRESHOLD AS BYTE_THR,
|
ARCHIVE_THRESHOLD_BYTES_SUM AS BYTE_THR,
|
||||||
HOURS_TO_EXPIRE_STATISTICS AS STATS_HRS,
|
HOURS_TO_EXPIRE_STATISTICS AS STATS_HRS,
|
||||||
ARCHIVE_ENABLED,
|
IS_ARCHIVE_ENABLED,
|
||||||
KEEP_IN_TRASH,
|
IS_KEEP_IN_TRASH,
|
||||||
CASE
|
CASE
|
||||||
WHEN ARCHIVAL_STRATEGY = 'MINIMUM_AGE_MONTHS'
|
WHEN ARCHIVAL_STRATEGY = 'MINIMUM_AGE_MONTHS'
|
||||||
AND MINIMUM_AGE_MONTHS = 0
|
AND MINIMUM_AGE_MONTHS = 0
|
||||||
AND FILES_COUNT_OVER_ARCHIVE_THRESHOLD = 10
|
AND ARCHIVE_THRESHOLD_FILES_COUNT = 10
|
||||||
AND ROWS_COUNT_OVER_ARCHIVE_THRESHOLD = 100000
|
AND ARCHIVE_THRESHOLD_ROWS_COUNT = 100000
|
||||||
AND BYTES_SUM_OVER_ARCHIVE_THRESHOLD = 1073741824
|
AND ARCHIVE_THRESHOLD_BYTES_SUM = 1073741824
|
||||||
AND HOURS_TO_EXPIRE_STATISTICS = 24
|
AND HOURS_TO_EXPIRE_STATISTICS = 24
|
||||||
AND ARCHIVE_ENABLED = 'Y'
|
AND IS_ARCHIVE_ENABLED = 'Y'
|
||||||
AND KEEP_IN_TRASH = 'N'
|
AND IS_KEEP_IN_TRASH = 'N'
|
||||||
THEN 'OK'
|
THEN 'OK'
|
||||||
ELSE 'ERROR'
|
ELSE 'ERROR'
|
||||||
END AS STATUS
|
END AS STATUS
|
||||||
@@ -195,28 +195,28 @@ WHERE A_SOURCE_KEY = 'LM'
|
|||||||
ORDER BY TABLE_ID;
|
ORDER BY TABLE_ID;
|
||||||
|
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT CSDB DEBT Tables (MINIMUM_AGE_MONTHS = 6):
|
PROMPT CSDB DEBT Tables (MINIMUM_AGE_MONTHS = 0):
|
||||||
PROMPT
|
PROMPT
|
||||||
|
|
||||||
SELECT
|
SELECT
|
||||||
TABLE_ID,
|
TABLE_ID,
|
||||||
ARCHIVAL_STRATEGY,
|
ARCHIVAL_STRATEGY,
|
||||||
MINIMUM_AGE_MONTHS,
|
MINIMUM_AGE_MONTHS,
|
||||||
FILES_COUNT_OVER_ARCHIVE_THRESHOLD AS FILE_THR,
|
ARCHIVE_THRESHOLD_FILES_COUNT AS FILE_THR,
|
||||||
ROWS_COUNT_OVER_ARCHIVE_THRESHOLD AS ROW_THR,
|
ARCHIVE_THRESHOLD_ROWS_COUNT AS ROW_THR,
|
||||||
BYTES_SUM_OVER_ARCHIVE_THRESHOLD AS BYTE_THR,
|
ARCHIVE_THRESHOLD_BYTES_SUM AS BYTE_THR,
|
||||||
HOURS_TO_EXPIRE_STATISTICS AS STATS_HRS,
|
HOURS_TO_EXPIRE_STATISTICS AS STATS_HRS,
|
||||||
ARCHIVE_ENABLED,
|
IS_ARCHIVE_ENABLED,
|
||||||
KEEP_IN_TRASH,
|
IS_KEEP_IN_TRASH,
|
||||||
CASE
|
CASE
|
||||||
WHEN ARCHIVAL_STRATEGY = 'MINIMUM_AGE_MONTHS'
|
WHEN ARCHIVAL_STRATEGY = 'MINIMUM_AGE_MONTHS'
|
||||||
AND MINIMUM_AGE_MONTHS = 6
|
AND MINIMUM_AGE_MONTHS = 0
|
||||||
AND FILES_COUNT_OVER_ARCHIVE_THRESHOLD = 5
|
AND ARCHIVE_THRESHOLD_FILES_COUNT = 5
|
||||||
AND ROWS_COUNT_OVER_ARCHIVE_THRESHOLD = 50000
|
AND ARCHIVE_THRESHOLD_ROWS_COUNT = 50000
|
||||||
AND BYTES_SUM_OVER_ARCHIVE_THRESHOLD = 536870912
|
AND ARCHIVE_THRESHOLD_BYTES_SUM = 536870912
|
||||||
AND HOURS_TO_EXPIRE_STATISTICS = 48
|
AND HOURS_TO_EXPIRE_STATISTICS = 48
|
||||||
AND ARCHIVE_ENABLED = 'Y'
|
AND IS_ARCHIVE_ENABLED = 'Y'
|
||||||
AND KEEP_IN_TRASH = 'N'
|
AND IS_KEEP_IN_TRASH = 'N'
|
||||||
THEN 'OK'
|
THEN 'OK'
|
||||||
ELSE 'ERROR'
|
ELSE 'ERROR'
|
||||||
END AS STATUS
|
END AS STATUS
|
||||||
@@ -234,21 +234,21 @@ SELECT
|
|||||||
TABLE_ID,
|
TABLE_ID,
|
||||||
ARCHIVAL_STRATEGY,
|
ARCHIVAL_STRATEGY,
|
||||||
MINIMUM_AGE_MONTHS,
|
MINIMUM_AGE_MONTHS,
|
||||||
FILES_COUNT_OVER_ARCHIVE_THRESHOLD AS FILE_THR,
|
ARCHIVE_THRESHOLD_FILES_COUNT AS FILE_THR,
|
||||||
ROWS_COUNT_OVER_ARCHIVE_THRESHOLD AS ROW_THR,
|
ARCHIVE_THRESHOLD_ROWS_COUNT AS ROW_THR,
|
||||||
BYTES_SUM_OVER_ARCHIVE_THRESHOLD AS BYTE_THR,
|
ARCHIVE_THRESHOLD_BYTES_SUM AS BYTE_THR,
|
||||||
HOURS_TO_EXPIRE_STATISTICS AS STATS_HRS,
|
HOURS_TO_EXPIRE_STATISTICS AS STATS_HRS,
|
||||||
ARCHIVE_ENABLED,
|
IS_ARCHIVE_ENABLED,
|
||||||
KEEP_IN_TRASH,
|
IS_KEEP_IN_TRASH,
|
||||||
CASE
|
CASE
|
||||||
WHEN ARCHIVAL_STRATEGY = 'MINIMUM_AGE_MONTHS'
|
WHEN ARCHIVAL_STRATEGY = 'MINIMUM_AGE_MONTHS'
|
||||||
AND MINIMUM_AGE_MONTHS = 0
|
AND MINIMUM_AGE_MONTHS = 0
|
||||||
AND FILES_COUNT_OVER_ARCHIVE_THRESHOLD = 10
|
AND ARCHIVE_THRESHOLD_FILES_COUNT = 10
|
||||||
AND ROWS_COUNT_OVER_ARCHIVE_THRESHOLD = 20000
|
AND ARCHIVE_THRESHOLD_ROWS_COUNT = 20000
|
||||||
AND BYTES_SUM_OVER_ARCHIVE_THRESHOLD = 268435456
|
AND ARCHIVE_THRESHOLD_BYTES_SUM = 268435456
|
||||||
AND HOURS_TO_EXPIRE_STATISTICS = 72
|
AND HOURS_TO_EXPIRE_STATISTICS = 72
|
||||||
AND ARCHIVE_ENABLED = 'Y'
|
AND IS_ARCHIVE_ENABLED = 'Y'
|
||||||
AND KEEP_IN_TRASH = 'N'
|
AND IS_KEEP_IN_TRASH = 'N'
|
||||||
THEN 'OK'
|
THEN 'OK'
|
||||||
ELSE 'ERROR'
|
ELSE 'ERROR'
|
||||||
END AS STATUS
|
END AS STATUS
|
||||||
@@ -267,12 +267,12 @@ SELECT
|
|||||||
COUNT(*) AS TOTAL_CONFIGURED,
|
COUNT(*) AS TOTAL_CONFIGURED,
|
||||||
SUM(CASE WHEN MINIMUM_AGE_MONTHS = 0 THEN 1 ELSE 0 END) AS CURRENT_MONTH_ONLY,
|
SUM(CASE WHEN MINIMUM_AGE_MONTHS = 0 THEN 1 ELSE 0 END) AS CURRENT_MONTH_ONLY,
|
||||||
SUM(CASE WHEN MINIMUM_AGE_MONTHS > 0 THEN 1 ELSE 0 END) AS MULTI_MONTH_RETENTION,
|
SUM(CASE WHEN MINIMUM_AGE_MONTHS > 0 THEN 1 ELSE 0 END) AS MULTI_MONTH_RETENTION,
|
||||||
SUM(CASE WHEN FILES_COUNT_OVER_ARCHIVE_THRESHOLD IS NOT NULL THEN 1 ELSE 0 END) AS WITH_FILE_THRESHOLD,
|
SUM(CASE WHEN ARCHIVE_THRESHOLD_FILES_COUNT IS NOT NULL THEN 1 ELSE 0 END) AS WITH_FILE_THRESHOLD,
|
||||||
SUM(CASE WHEN ROWS_COUNT_OVER_ARCHIVE_THRESHOLD IS NOT NULL THEN 1 ELSE 0 END) AS WITH_ROWS_THRESHOLD,
|
SUM(CASE WHEN ARCHIVE_THRESHOLD_ROWS_COUNT IS NOT NULL THEN 1 ELSE 0 END) AS WITH_ROWS_THRESHOLD,
|
||||||
SUM(CASE WHEN BYTES_SUM_OVER_ARCHIVE_THRESHOLD IS NOT NULL THEN 1 ELSE 0 END) AS WITH_BYTES_THRESHOLD,
|
SUM(CASE WHEN ARCHIVE_THRESHOLD_BYTES_SUM IS NOT NULL THEN 1 ELSE 0 END) AS WITH_BYTES_THRESHOLD,
|
||||||
SUM(CASE WHEN HOURS_TO_EXPIRE_STATISTICS IS NOT NULL THEN 1 ELSE 0 END) AS WITH_STATS_EXPIRY,
|
SUM(CASE WHEN HOURS_TO_EXPIRE_STATISTICS IS NOT NULL THEN 1 ELSE 0 END) AS WITH_STATS_EXPIRY,
|
||||||
SUM(CASE WHEN ARCHIVE_ENABLED = 'Y' THEN 1 ELSE 0 END) AS ARCHIVAL_ENABLED,
|
SUM(CASE WHEN IS_ARCHIVE_ENABLED = 'Y' THEN 1 ELSE 0 END) AS ARCHIVAL_ENABLED,
|
||||||
SUM(CASE WHEN KEEP_IN_TRASH = 'N' THEN 1 ELSE 0 END) AS IMMEDIATE_DELETE
|
SUM(CASE WHEN IS_KEEP_IN_TRASH = 'N' THEN 1 ELSE 0 END) AS IMMEDIATE_DELETE
|
||||||
FROM CT_MRDS.A_SOURCE_FILE_CONFIG
|
FROM CT_MRDS.A_SOURCE_FILE_CONFIG
|
||||||
WHERE SOURCE_FILE_TYPE = 'INPUT'
|
WHERE SOURCE_FILE_TYPE = 'INPUT'
|
||||||
AND ((A_SOURCE_KEY = 'LM' AND TABLE_ID LIKE 'LM_%')
|
AND ((A_SOURCE_KEY = 'LM' AND TABLE_ID LIKE 'LM_%')
|
||||||
@@ -306,9 +306,9 @@ SELECT
|
|||||||
COUNT(*) AS TABLE_COUNT,
|
COUNT(*) AS TABLE_COUNT,
|
||||||
MAX(ARCHIVAL_STRATEGY) AS STRATEGY,
|
MAX(ARCHIVAL_STRATEGY) AS STRATEGY,
|
||||||
MAX(MINIMUM_AGE_MONTHS) AS MIN_AGE,
|
MAX(MINIMUM_AGE_MONTHS) AS MIN_AGE,
|
||||||
MAX(FILES_COUNT_OVER_ARCHIVE_THRESHOLD) AS FILES_THRESHOLD,
|
MAX(ARCHIVE_THRESHOLD_FILES_COUNT) AS FILES_THRESHOLD,
|
||||||
MAX(ROWS_COUNT_OVER_ARCHIVE_THRESHOLD) AS ROWS_THRESHOLD,
|
MAX(ARCHIVE_THRESHOLD_ROWS_COUNT) AS ROWS_THRESHOLD,
|
||||||
ROUND(MAX(BYTES_SUM_OVER_ARCHIVE_THRESHOLD)/1048576, 0) || ' MB' AS BYTES_THRESHOLD,
|
ROUND(MAX(ARCHIVE_THRESHOLD_BYTES_SUM)/1048576, 0) || ' MB' AS BYTES_THRESHOLD,
|
||||||
MAX(HOURS_TO_EXPIRE_STATISTICS) AS STATS_HOURS
|
MAX(HOURS_TO_EXPIRE_STATISTICS) AS STATS_HOURS
|
||||||
FROM CT_MRDS.A_SOURCE_FILE_CONFIG
|
FROM CT_MRDS.A_SOURCE_FILE_CONFIG
|
||||||
WHERE SOURCE_FILE_TYPE = 'INPUT'
|
WHERE SOURCE_FILE_TYPE = 'INPUT'
|
||||||
|
|||||||
@@ -0,0 +1,29 @@
|
|||||||
|
--=============================================================================================================================
|
||||||
|
-- MARS-828: Install CT_MRDS.FILE_MANAGER Package Specification v3.3.2
|
||||||
|
--=============================================================================================================================
|
||||||
|
-- Purpose: Deploy FILE_MANAGER Package Specification with MARS-828 column compatibility
|
||||||
|
-- Author: Grzegorz Michalski
|
||||||
|
-- Date: 2026-02-20
|
||||||
|
-- Related: MARS-828 Threshold Column Rename Compatibility
|
||||||
|
--=============================================================================================================================
|
||||||
|
|
||||||
|
SET SERVEROUTPUT ON
|
||||||
|
|
||||||
|
PROMPT ========================================================================
|
||||||
|
PROMPT Installing CT_MRDS.FILE_MANAGER Package Specification v3.3.2
|
||||||
|
PROMPT ========================================================================
|
||||||
|
|
||||||
|
@@new_version/FILE_MANAGER.pkg
|
||||||
|
|
||||||
|
-- Verify package compilation (check specific schema when installing as ADMIN)
|
||||||
|
SELECT OBJECT_NAME, OBJECT_TYPE, STATUS
|
||||||
|
FROM ALL_OBJECTS
|
||||||
|
WHERE OWNER = 'CT_MRDS'
|
||||||
|
AND OBJECT_NAME = 'FILE_MANAGER'
|
||||||
|
AND OBJECT_TYPE = 'PACKAGE';
|
||||||
|
|
||||||
|
PROMPT SUCCESS: FILE_MANAGER Package Specification v3.3.2 installed
|
||||||
|
|
||||||
|
--=============================================================================================================================
|
||||||
|
-- End of Script
|
||||||
|
--=============================================================================================================================
|
||||||
@@ -0,0 +1,38 @@
|
|||||||
|
--=============================================================================================================================
|
||||||
|
-- MARS-828: Install CT_MRDS.FILE_MANAGER Package Body v3.3.2
|
||||||
|
--=============================================================================================================================
|
||||||
|
-- Purpose: Deploy FILE_MANAGER Package Body with MARS-828 threshold column compatibility
|
||||||
|
-- Author: Grzegorz Michalski
|
||||||
|
-- Date: 2026-02-20
|
||||||
|
-- Related: MARS-828 Threshold Column Rename Compatibility
|
||||||
|
--=============================================================================================================================
|
||||||
|
|
||||||
|
SET SERVEROUTPUT ON
|
||||||
|
|
||||||
|
PROMPT ========================================================================
|
||||||
|
PROMPT Installing CT_MRDS.FILE_MANAGER Package Body v3.3.2
|
||||||
|
PROMPT ========================================================================
|
||||||
|
|
||||||
|
@@new_version/FILE_MANAGER.pkb
|
||||||
|
|
||||||
|
-- Verify package compilation (check specific schema when installing as ADMIN)
|
||||||
|
SELECT OBJECT_NAME, OBJECT_TYPE, STATUS
|
||||||
|
FROM ALL_OBJECTS
|
||||||
|
WHERE OWNER = 'CT_MRDS'
|
||||||
|
AND OBJECT_NAME = 'FILE_MANAGER'
|
||||||
|
AND OBJECT_TYPE IN ('PACKAGE', 'PACKAGE BODY')
|
||||||
|
ORDER BY OBJECT_TYPE;
|
||||||
|
|
||||||
|
-- Check for any compilation errors
|
||||||
|
SELECT 'COMPILATION ERRORS FOUND' AS WARNING
|
||||||
|
FROM ALL_ERRORS
|
||||||
|
WHERE OWNER = 'CT_MRDS'
|
||||||
|
AND NAME = 'FILE_MANAGER'
|
||||||
|
AND TYPE = 'PACKAGE BODY'
|
||||||
|
AND ROWNUM = 1;
|
||||||
|
|
||||||
|
PROMPT SUCCESS: FILE_MANAGER Package Body v3.3.2 installed
|
||||||
|
|
||||||
|
--=============================================================================================================================
|
||||||
|
-- End of Script
|
||||||
|
--=============================================================================================================================
|
||||||
@@ -1,7 +1,7 @@
|
|||||||
-- MARS-828: Rollback archival strategy columns
|
-- MARS-828: Rollback archival strategy columns
|
||||||
-- Author: Grzegorz Michalski
|
-- Author: Grzegorz Michalski
|
||||||
-- Date: 2026-01-27
|
-- Date: 2026-01-27
|
||||||
-- Description: Remove ARCHIVAL_STRATEGY, MINIMUM_AGE_MONTHS, ARCHIVE_ENABLED, and KEEP_IN_TRASH columns
|
-- Description: Remove ARCHIVAL_STRATEGY, MINIMUM_AGE_MONTHS, IS_ARCHIVE_ENABLED, and IS_KEEP_IN_TRASH columns
|
||||||
|
|
||||||
PROMPT ========================================
|
PROMPT ========================================
|
||||||
PROMPT MARS-828: Removing archival strategy and config columns
|
PROMPT MARS-828: Removing archival strategy and config columns
|
||||||
@@ -12,17 +12,20 @@ ALTER TABLE CT_MRDS.A_SOURCE_FILE_CONFIG
|
|||||||
DROP CONSTRAINT CHK_ARCHIVAL_STRATEGY;
|
DROP CONSTRAINT CHK_ARCHIVAL_STRATEGY;
|
||||||
|
|
||||||
ALTER TABLE CT_MRDS.A_SOURCE_FILE_CONFIG
|
ALTER TABLE CT_MRDS.A_SOURCE_FILE_CONFIG
|
||||||
DROP CONSTRAINT CHK_ARCHIVE_ENABLED;
|
DROP CONSTRAINT CHK_IS_ARCHIVE_ENABLED;
|
||||||
|
|
||||||
ALTER TABLE CT_MRDS.A_SOURCE_FILE_CONFIG
|
ALTER TABLE CT_MRDS.A_SOURCE_FILE_CONFIG
|
||||||
DROP CONSTRAINT CHK_KEEP_IN_TRASH;
|
DROP CONSTRAINT CHK_IS_KEEP_IN_TRASH;
|
||||||
|
|
||||||
-- Drop columns
|
-- Drop columns
|
||||||
ALTER TABLE CT_MRDS.A_SOURCE_FILE_CONFIG DROP (
|
ALTER TABLE CT_MRDS.A_SOURCE_FILE_CONFIG DROP (
|
||||||
ARCHIVAL_STRATEGY,
|
ARCHIVAL_STRATEGY,
|
||||||
MINIMUM_AGE_MONTHS,
|
MINIMUM_AGE_MONTHS
|
||||||
ARCHIVE_ENABLED,
|
);
|
||||||
KEEP_IN_TRASH
|
|
||||||
|
ALTER TABLE CT_MRDS.A_SOURCE_FILE_CONFIG DROP (
|
||||||
|
IS_ARCHIVE_ENABLED,
|
||||||
|
IS_KEEP_IN_TRASH
|
||||||
);
|
);
|
||||||
|
|
||||||
-- Verify columns dropped
|
-- Verify columns dropped
|
||||||
@@ -31,7 +34,7 @@ SELECT
|
|||||||
FROM all_tab_columns
|
FROM all_tab_columns
|
||||||
WHERE owner = 'CT_MRDS'
|
WHERE owner = 'CT_MRDS'
|
||||||
AND table_name = 'A_SOURCE_FILE_CONFIG'
|
AND table_name = 'A_SOURCE_FILE_CONFIG'
|
||||||
AND column_name IN ('ARCHIVAL_STRATEGY', 'MINIMUM_AGE_MONTHS', 'ARCHIVE_ENABLED', 'KEEP_IN_TRASH');
|
AND column_name IN ('ARCHIVAL_STRATEGY', 'MINIMUM_AGE_MONTHS', 'IS_ARCHIVE_ENABLED', 'IS_KEEP_IN_TRASH');
|
||||||
|
|
||||||
PROMPT ========================================
|
PROMPT ========================================
|
||||||
PROMPT Archival strategy and config columns removed successfully
|
PROMPT Archival strategy and config columns removed successfully
|
||||||
|
|||||||
@@ -0,0 +1,47 @@
|
|||||||
|
-- MARS-828: Rollback threshold column renames
|
||||||
|
-- Author: Grzegorz Michalski
|
||||||
|
-- Date: 2026-01-28
|
||||||
|
-- Description: Reverts threshold columns back to original naming
|
||||||
|
|
||||||
|
PROMPT ========================================
|
||||||
|
PROMPT MARS-828: Rolling back threshold column renames
|
||||||
|
PROMPT ========================================
|
||||||
|
|
||||||
|
-- Revert threshold columns to original names
|
||||||
|
ALTER TABLE CT_MRDS.A_SOURCE_FILE_CONFIG
|
||||||
|
RENAME COLUMN ARCHIVE_THRESHOLD_DAYS TO DAYS_FOR_ARCHIVE_THRESHOLD;
|
||||||
|
|
||||||
|
ALTER TABLE CT_MRDS.A_SOURCE_FILE_CONFIG
|
||||||
|
RENAME COLUMN ARCHIVE_THRESHOLD_FILES_COUNT TO FILES_COUNT_OVER_ARCHIVE_THRESHOLD;
|
||||||
|
|
||||||
|
ALTER TABLE CT_MRDS.A_SOURCE_FILE_CONFIG
|
||||||
|
RENAME COLUMN ARCHIVE_THRESHOLD_BYTES_SUM TO BYTES_SUM_OVER_ARCHIVE_THRESHOLD;
|
||||||
|
|
||||||
|
ALTER TABLE CT_MRDS.A_SOURCE_FILE_CONFIG
|
||||||
|
RENAME COLUMN ARCHIVE_THRESHOLD_ROWS_COUNT TO ROWS_COUNT_OVER_ARCHIVE_THRESHOLD;
|
||||||
|
|
||||||
|
-- Verify rollback
|
||||||
|
PROMPT ========================================
|
||||||
|
PROMPT Verifying threshold column rollback...
|
||||||
|
PROMPT ========================================
|
||||||
|
|
||||||
|
SELECT
|
||||||
|
column_name,
|
||||||
|
data_type,
|
||||||
|
data_length
|
||||||
|
FROM all_tab_columns
|
||||||
|
WHERE owner = 'CT_MRDS'
|
||||||
|
AND table_name = 'A_SOURCE_FILE_CONFIG'
|
||||||
|
AND (column_name LIKE '%ARCHIVE_THRESHOLD%' OR column_name LIKE 'DAYS_FOR%')
|
||||||
|
ORDER BY column_id;
|
||||||
|
|
||||||
|
PROMPT ========================================
|
||||||
|
PROMPT Expected original columns:
|
||||||
|
PROMPT DAYS_FOR_ARCHIVE_THRESHOLD
|
||||||
|
PROMPT FILES_COUNT_OVER_ARCHIVE_THRESHOLD
|
||||||
|
PROMPT BYTES_SUM_OVER_ARCHIVE_THRESHOLD
|
||||||
|
PROMPT ROWS_COUNT_OVER_ARCHIVE_THRESHOLD
|
||||||
|
PROMPT ========================================
|
||||||
|
|
||||||
|
PROMPT Threshold column renames rolled back successfully
|
||||||
|
PROMPT ========================================
|
||||||
@@ -0,0 +1,84 @@
|
|||||||
|
-- =====================================================================
|
||||||
|
-- Script: 94b_MARS_828_rollback_column_comments.sql
|
||||||
|
-- MARS Issue: MARS-828
|
||||||
|
-- Author: Grzegorz Michalski
|
||||||
|
-- Date: 2026-02-20
|
||||||
|
-- Purpose: Remove column comments added by 01b_MARS_828_add_column_comments.sql
|
||||||
|
-- Description: Optional rollback - removes documentation but does not affect functionality
|
||||||
|
-- =====================================================================
|
||||||
|
|
||||||
|
PROMPT ========================================
|
||||||
|
PROMPT MARS-828: Removing column comments (optional)
|
||||||
|
PROMPT ========================================
|
||||||
|
|
||||||
|
-- =====================================================================
|
||||||
|
-- Remove A_SOURCE_FILE_CONFIG Column Comments
|
||||||
|
-- =====================================================================
|
||||||
|
|
||||||
|
PROMPT Removing column comments from A_SOURCE_FILE_CONFIG...
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.A_SOURCE_FILE_CONFIG_KEY IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.A_SOURCE_KEY IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.SOURCE_FILE_TYPE IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.SOURCE_FILE_ID IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.SOURCE_FILE_DESC IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.SOURCE_FILE_NAME_PATTERN IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.TABLE_ID IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.TEMPLATE_TABLE_NAME IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.CONTAINER_FILE_KEY IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ARCHIVE_THRESHOLD_DAYS IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ARCHIVE_THRESHOLD_FILES_COUNT IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ARCHIVE_THRESHOLD_BYTES_SUM IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ARCHIVE_THRESHOLD_ROWS_COUNT IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ODS_SCHEMA_NAME IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.HOURS_TO_EXPIRE_STATISTICS IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ARCHIVAL_STRATEGY IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.MINIMUM_AGE_MONTHS IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ENCODING IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.IS_ARCHIVE_ENABLED IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.IS_KEEP_IN_TRASH IS '';
|
||||||
|
|
||||||
|
-- =====================================================================
|
||||||
|
-- Remove A_SOURCE_FILE_RECEIVED Column Comments
|
||||||
|
-- =====================================================================
|
||||||
|
|
||||||
|
PROMPT Removing column comments from A_SOURCE_FILE_RECEIVED...
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_CONFIG_KEY IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.SOURCE_FILE_NAME IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.CHECKSUM IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.CREATED IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.BYTES IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.RECEPTION_DATE IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.PROCESSING_STATUS IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.EXTERNAL_TABLE_NAME IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.PARTITION_YEAR IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.PARTITION_MONTH IS '';
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.ARCH_PATH IS '';
|
||||||
|
|
||||||
|
-- =====================================================================
|
||||||
|
-- Verification
|
||||||
|
-- =====================================================================
|
||||||
|
|
||||||
|
PROMPT
|
||||||
|
PROMPT Verifying column comments removed...
|
||||||
|
PROMPT
|
||||||
|
|
||||||
|
SELECT
|
||||||
|
table_name,
|
||||||
|
COUNT(*) as total_columns,
|
||||||
|
COUNT(CASE WHEN comments IS NOT NULL AND LENGTH(comments) > 0 THEN 1 END) as documented_columns
|
||||||
|
FROM all_col_comments
|
||||||
|
WHERE owner = 'CT_MRDS'
|
||||||
|
AND table_name IN ('A_SOURCE_FILE_CONFIG', 'A_SOURCE_FILE_RECEIVED')
|
||||||
|
GROUP BY table_name
|
||||||
|
ORDER BY table_name;
|
||||||
|
|
||||||
|
PROMPT
|
||||||
|
PROMPT ========================================
|
||||||
|
PROMPT Column comments removed successfully
|
||||||
|
PROMPT ========================================
|
||||||
|
PROMPT NOTE: This is an optional rollback step
|
||||||
|
PROMPT Database functionality is not affected
|
||||||
|
PROMPT ========================================
|
||||||
@@ -10,9 +10,9 @@
|
|||||||
-- archival parameters back to NULL (unconfigured state):
|
-- archival parameters back to NULL (unconfigured state):
|
||||||
-- - ARCHIVAL_STRATEGY
|
-- - ARCHIVAL_STRATEGY
|
||||||
-- - MINIMUM_AGE_MONTHS
|
-- - MINIMUM_AGE_MONTHS
|
||||||
-- - FILES_COUNT_OVER_ARCHIVE_THRESHOLD
|
-- - ARCHIVE_THRESHOLD_FILES_COUNT
|
||||||
-- - ROWS_COUNT_OVER_ARCHIVE_THRESHOLD
|
-- - ARCHIVE_THRESHOLD_ROWS_COUNT
|
||||||
-- - BYTES_SUM_OVER_ARCHIVE_THRESHOLD
|
-- - ARCHIVE_THRESHOLD_BYTES_SUM
|
||||||
-- - HOURS_TO_EXPIRE_STATISTICS
|
-- - HOURS_TO_EXPIRE_STATISTICS
|
||||||
--
|
--
|
||||||
-- This script reverts changes made by:
|
-- This script reverts changes made by:
|
||||||
@@ -47,9 +47,9 @@ UPDATE CT_MRDS.A_SOURCE_FILE_CONFIG
|
|||||||
SET ARCHIVAL_STRATEGY = NULL,
|
SET ARCHIVAL_STRATEGY = NULL,
|
||||||
MINIMUM_AGE_MONTHS = NULL,
|
MINIMUM_AGE_MONTHS = NULL,
|
||||||
ODS_SCHEMA_NAME = NULL,
|
ODS_SCHEMA_NAME = NULL,
|
||||||
FILES_COUNT_OVER_ARCHIVE_THRESHOLD = NULL,
|
ARCHIVE_THRESHOLD_FILES_COUNT = NULL,
|
||||||
ROWS_COUNT_OVER_ARCHIVE_THRESHOLD = NULL,
|
ARCHIVE_THRESHOLD_ROWS_COUNT = NULL,
|
||||||
BYTES_SUM_OVER_ARCHIVE_THRESHOLD = NULL,
|
ARCHIVE_THRESHOLD_BYTES_SUM = NULL,
|
||||||
HOURS_TO_EXPIRE_STATISTICS = NULL
|
HOURS_TO_EXPIRE_STATISTICS = NULL
|
||||||
WHERE SOURCE_FILE_TYPE = 'INPUT'
|
WHERE SOURCE_FILE_TYPE = 'INPUT'
|
||||||
AND A_SOURCE_KEY = 'LM'
|
AND A_SOURCE_KEY = 'LM'
|
||||||
@@ -88,9 +88,9 @@ UPDATE CT_MRDS.A_SOURCE_FILE_CONFIG
|
|||||||
SET ARCHIVAL_STRATEGY = NULL,
|
SET ARCHIVAL_STRATEGY = NULL,
|
||||||
MINIMUM_AGE_MONTHS = NULL,
|
MINIMUM_AGE_MONTHS = NULL,
|
||||||
ODS_SCHEMA_NAME = NULL,
|
ODS_SCHEMA_NAME = NULL,
|
||||||
FILES_COUNT_OVER_ARCHIVE_THRESHOLD = NULL,
|
ARCHIVE_THRESHOLD_FILES_COUNT = NULL,
|
||||||
ROWS_COUNT_OVER_ARCHIVE_THRESHOLD = NULL,
|
ARCHIVE_THRESHOLD_ROWS_COUNT = NULL,
|
||||||
BYTES_SUM_OVER_ARCHIVE_THRESHOLD = NULL,
|
ARCHIVE_THRESHOLD_BYTES_SUM = NULL,
|
||||||
HOURS_TO_EXPIRE_STATISTICS = NULL
|
HOURS_TO_EXPIRE_STATISTICS = NULL
|
||||||
WHERE SOURCE_FILE_TYPE = 'INPUT'
|
WHERE SOURCE_FILE_TYPE = 'INPUT'
|
||||||
AND A_SOURCE_KEY = 'CSDB'
|
AND A_SOURCE_KEY = 'CSDB'
|
||||||
@@ -109,9 +109,9 @@ UPDATE CT_MRDS.A_SOURCE_FILE_CONFIG
|
|||||||
SET ARCHIVAL_STRATEGY = NULL,
|
SET ARCHIVAL_STRATEGY = NULL,
|
||||||
MINIMUM_AGE_MONTHS = NULL,
|
MINIMUM_AGE_MONTHS = NULL,
|
||||||
ODS_SCHEMA_NAME = NULL,
|
ODS_SCHEMA_NAME = NULL,
|
||||||
FILES_COUNT_OVER_ARCHIVE_THRESHOLD = NULL,
|
ARCHIVE_THRESHOLD_FILES_COUNT = NULL,
|
||||||
ROWS_COUNT_OVER_ARCHIVE_THRESHOLD = NULL,
|
ARCHIVE_THRESHOLD_ROWS_COUNT = NULL,
|
||||||
BYTES_SUM_OVER_ARCHIVE_THRESHOLD = NULL,
|
ARCHIVE_THRESHOLD_BYTES_SUM = NULL,
|
||||||
HOURS_TO_EXPIRE_STATISTICS = NULL
|
HOURS_TO_EXPIRE_STATISTICS = NULL
|
||||||
WHERE SOURCE_FILE_TYPE = 'INPUT'
|
WHERE SOURCE_FILE_TYPE = 'INPUT'
|
||||||
AND A_SOURCE_KEY = 'CSDB'
|
AND A_SOURCE_KEY = 'CSDB'
|
||||||
@@ -148,16 +148,16 @@ SELECT
|
|||||||
TABLE_ID,
|
TABLE_ID,
|
||||||
ARCHIVAL_STRATEGY,
|
ARCHIVAL_STRATEGY,
|
||||||
MINIMUM_AGE_MONTHS,
|
MINIMUM_AGE_MONTHS,
|
||||||
FILES_COUNT_OVER_ARCHIVE_THRESHOLD AS FILE_THR,
|
ARCHIVE_THRESHOLD_FILES_COUNT AS FILE_THR,
|
||||||
ROWS_COUNT_OVER_ARCHIVE_THRESHOLD AS ROW_THR,
|
ARCHIVE_THRESHOLD_ROWS_COUNT AS ROW_THR,
|
||||||
BYTES_SUM_OVER_ARCHIVE_THRESHOLD AS BYTE_THR,
|
ARCHIVE_THRESHOLD_BYTES_SUM AS BYTE_THR,
|
||||||
HOURS_TO_EXPIRE_STATISTICS AS STATS_HRS,
|
HOURS_TO_EXPIRE_STATISTICS AS STATS_HRS,
|
||||||
CASE
|
CASE
|
||||||
WHEN ARCHIVAL_STRATEGY IS NULL
|
WHEN ARCHIVAL_STRATEGY IS NULL
|
||||||
AND MINIMUM_AGE_MONTHS IS NULL
|
AND MINIMUM_AGE_MONTHS IS NULL
|
||||||
AND FILES_COUNT_OVER_ARCHIVE_THRESHOLD IS NULL
|
AND ARCHIVE_THRESHOLD_FILES_COUNT IS NULL
|
||||||
AND ROWS_COUNT_OVER_ARCHIVE_THRESHOLD IS NULL
|
AND ARCHIVE_THRESHOLD_ROWS_COUNT IS NULL
|
||||||
AND BYTES_SUM_OVER_ARCHIVE_THRESHOLD IS NULL
|
AND ARCHIVE_THRESHOLD_BYTES_SUM IS NULL
|
||||||
AND HOURS_TO_EXPIRE_STATISTICS IS NULL
|
AND HOURS_TO_EXPIRE_STATISTICS IS NULL
|
||||||
THEN 'OK'
|
THEN 'OK'
|
||||||
ELSE 'ERROR - Still configured'
|
ELSE 'ERROR - Still configured'
|
||||||
@@ -176,16 +176,16 @@ SELECT
|
|||||||
TABLE_ID,
|
TABLE_ID,
|
||||||
ARCHIVAL_STRATEGY,
|
ARCHIVAL_STRATEGY,
|
||||||
MINIMUM_AGE_MONTHS,
|
MINIMUM_AGE_MONTHS,
|
||||||
FILES_COUNT_OVER_ARCHIVE_THRESHOLD AS FILE_THR,
|
ARCHIVE_THRESHOLD_FILES_COUNT AS FILE_THR,
|
||||||
ROWS_COUNT_OVER_ARCHIVE_THRESHOLD AS ROW_THR,
|
ARCHIVE_THRESHOLD_ROWS_COUNT AS ROW_THR,
|
||||||
BYTES_SUM_OVER_ARCHIVE_THRESHOLD AS BYTE_THR,
|
ARCHIVE_THRESHOLD_BYTES_SUM AS BYTE_THR,
|
||||||
HOURS_TO_EXPIRE_STATISTICS AS STATS_HRS,
|
HOURS_TO_EXPIRE_STATISTICS AS STATS_HRS,
|
||||||
CASE
|
CASE
|
||||||
WHEN ARCHIVAL_STRATEGY IS NULL
|
WHEN ARCHIVAL_STRATEGY IS NULL
|
||||||
AND MINIMUM_AGE_MONTHS IS NULL
|
AND MINIMUM_AGE_MONTHS IS NULL
|
||||||
AND FILES_COUNT_OVER_ARCHIVE_THRESHOLD IS NULL
|
AND ARCHIVE_THRESHOLD_FILES_COUNT IS NULL
|
||||||
AND ROWS_COUNT_OVER_ARCHIVE_THRESHOLD IS NULL
|
AND ARCHIVE_THRESHOLD_ROWS_COUNT IS NULL
|
||||||
AND BYTES_SUM_OVER_ARCHIVE_THRESHOLD IS NULL
|
AND ARCHIVE_THRESHOLD_BYTES_SUM IS NULL
|
||||||
AND HOURS_TO_EXPIRE_STATISTICS IS NULL
|
AND HOURS_TO_EXPIRE_STATISTICS IS NULL
|
||||||
THEN 'OK'
|
THEN 'OK'
|
||||||
ELSE 'ERROR - Still configured'
|
ELSE 'ERROR - Still configured'
|
||||||
@@ -204,16 +204,16 @@ SELECT
|
|||||||
TABLE_ID,
|
TABLE_ID,
|
||||||
ARCHIVAL_STRATEGY,
|
ARCHIVAL_STRATEGY,
|
||||||
MINIMUM_AGE_MONTHS,
|
MINIMUM_AGE_MONTHS,
|
||||||
FILES_COUNT_OVER_ARCHIVE_THRESHOLD AS FILE_THR,
|
ARCHIVE_THRESHOLD_FILES_COUNT AS FILE_THR,
|
||||||
ROWS_COUNT_OVER_ARCHIVE_THRESHOLD AS ROW_THR,
|
ARCHIVE_THRESHOLD_ROWS_COUNT AS ROW_THR,
|
||||||
BYTES_SUM_OVER_ARCHIVE_THRESHOLD AS BYTE_THR,
|
ARCHIVE_THRESHOLD_BYTES_SUM AS BYTE_THR,
|
||||||
HOURS_TO_EXPIRE_STATISTICS AS STATS_HRS,
|
HOURS_TO_EXPIRE_STATISTICS AS STATS_HRS,
|
||||||
CASE
|
CASE
|
||||||
WHEN ARCHIVAL_STRATEGY IS NULL
|
WHEN ARCHIVAL_STRATEGY IS NULL
|
||||||
AND MINIMUM_AGE_MONTHS IS NULL
|
AND MINIMUM_AGE_MONTHS IS NULL
|
||||||
AND FILES_COUNT_OVER_ARCHIVE_THRESHOLD IS NULL
|
AND ARCHIVE_THRESHOLD_FILES_COUNT IS NULL
|
||||||
AND ROWS_COUNT_OVER_ARCHIVE_THRESHOLD IS NULL
|
AND ARCHIVE_THRESHOLD_ROWS_COUNT IS NULL
|
||||||
AND BYTES_SUM_OVER_ARCHIVE_THRESHOLD IS NULL
|
AND ARCHIVE_THRESHOLD_BYTES_SUM IS NULL
|
||||||
AND HOURS_TO_EXPIRE_STATISTICS IS NULL
|
AND HOURS_TO_EXPIRE_STATISTICS IS NULL
|
||||||
THEN 'OK'
|
THEN 'OK'
|
||||||
ELSE 'ERROR - Still configured'
|
ELSE 'ERROR - Still configured'
|
||||||
|
|||||||
@@ -0,0 +1,10 @@
|
|||||||
|
-- ===================================================================
|
||||||
|
-- MARS-828: Rollback FILE_MANAGER Package Specification to v3.3.1
|
||||||
|
-- ===================================================================
|
||||||
|
-- Purpose: Restore previous package specification version (pre-threshold column rename compatibility)
|
||||||
|
-- Author: Grzegorz Michalski
|
||||||
|
-- Date: 2026-02-20
|
||||||
|
-- WARNING: This removes MARS-828 threshold column compatibility from FILE_MANAGER
|
||||||
|
-- ===================================================================
|
||||||
|
|
||||||
|
@@rollback_version/FILE_MANAGER.pkg
|
||||||
@@ -0,0 +1,10 @@
|
|||||||
|
-- ===================================================================
|
||||||
|
-- MARS-828: Rollback FILE_MANAGER Package Body to v3.3.1
|
||||||
|
-- ===================================================================
|
||||||
|
-- Purpose: Restore previous package body version (pre-threshold column rename compatibility)
|
||||||
|
-- Author: Grzegorz Michalski
|
||||||
|
-- Date: 2026-02-20
|
||||||
|
-- WARNING: This removes MARS-828 threshold column compatibility from FILE_MANAGER
|
||||||
|
-- ===================================================================
|
||||||
|
|
||||||
|
@@rollback_version/FILE_MANAGER.pkb
|
||||||
@@ -35,10 +35,10 @@ PROMPT
|
|||||||
PROMPT ============================================================================
|
PROMPT ============================================================================
|
||||||
PROMPT MARS-828 Installation Starting
|
PROMPT MARS-828 Installation Starting
|
||||||
PROMPT ============================================================================
|
PROMPT ============================================================================
|
||||||
PROMPT Package: CT_MRDS.FILE_ARCHIVER
|
PROMPT Package: CT_MRDS.FILE_ARCHIVER v3.3.0 + CT_MRDS.FILE_MANAGER v3.3.2
|
||||||
PROMPT Change: Enhanced archival strategies (MINIMUM_AGE_MONTHS, HYBRID) + TRASH retention + Selective archiving
|
PROMPT Change: Enhanced archival strategies (MINIMUM_AGE_MONTHS, HYBRID) + TRASH retention + Selective archiving + FILE_MANAGER compatibility
|
||||||
PROMPT Purpose: Flexible archival policies per data source with file retention and config-based control
|
PROMPT Purpose: Flexible archival policies per data source with file retention and config-based control
|
||||||
PROMPT Steps: 10 (DDL, Trigger, Statuses, Grants, Package v3.3.0, Verify, Track, Configure)
|
PROMPT Steps: 14 (DDL, Rename, Comments, Trigger, Statuses, Grants, Packages, Verify, Track, Configure)
|
||||||
PROMPT Timestamp:
|
PROMPT Timestamp:
|
||||||
SELECT TO_CHAR(SYSDATE, 'YYYY-MM-DD HH24:MI:SS') AS install_start FROM DUAL;
|
SELECT TO_CHAR(SYSDATE, 'YYYY-MM-DD HH24:MI:SS') AS install_start FROM DUAL;
|
||||||
PROMPT ============================================================================
|
PROMPT ============================================================================
|
||||||
@@ -56,52 +56,72 @@ WHENEVER SQLERROR CONTINUE
|
|||||||
|
|
||||||
-- Installation steps
|
-- Installation steps
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT Step 1/9: Adding archival strategy and config columns to A_SOURCE_FILE_CONFIG
|
PROMPT Step 1/12: Adding archival strategy and config columns to A_SOURCE_FILE_CONFIG
|
||||||
PROMPT =============================================================================
|
PROMPT ==============================================================================
|
||||||
@@01_MARS_828_install_add_archival_strategy_columns.sql
|
@@01_MARS_828_install_add_archival_strategy_columns.sql
|
||||||
|
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT Step 2/9: Creating validation trigger
|
PROMPT Step 2/12: Renaming threshold columns for consistent naming
|
||||||
|
PROMPT ==========================================================
|
||||||
|
@@01a_MARS_828_rename_threshold_columns.sql
|
||||||
|
|
||||||
|
PROMPT
|
||||||
|
PROMPT Step 3/12: Adding comprehensive column comments
|
||||||
|
PROMPT ===============================================
|
||||||
|
@@01b_MARS_828_add_column_comments.sql
|
||||||
|
|
||||||
|
PROMPT
|
||||||
|
PROMPT Step 4/12: Creating validation trigger
|
||||||
PROMPT ======================================
|
PROMPT ======================================
|
||||||
@@02_MARS_828_install_archival_strategy_trigger.sql
|
@@02_MARS_828_install_archival_strategy_trigger.sql
|
||||||
|
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT Step 3/10: Adding TRASH retention statuses to A_SOURCE_FILE_RECEIVED
|
PROMPT Step 5/12: Adding TRASH retention statuses to A_SOURCE_FILE_RECEIVED
|
||||||
PROMPT =====================================================================
|
PROMPT ===================================================================
|
||||||
@@07_MARS_828_install_add_trash_retention_statuses.sql
|
@@07_MARS_828_install_add_trash_retention_statuses.sql
|
||||||
|
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT Step 4/10: Granting privileges on T_FILENAME to MRDS_LOADER
|
PROMPT Step 6/12: Granting privileges on T_FILENAME to MRDS_LOADER
|
||||||
PROMPT ============================================================
|
PROMPT ==========================================================
|
||||||
@@08_MARS_828_install_grant_t_filename.sql
|
@@08_MARS_828_install_grant_t_filename.sql
|
||||||
|
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT Step 5/10: Deploying FILE_ARCHIVER Package Specification v3.3.0
|
PROMPT Step 7/12: Deploying FILE_ARCHIVER Package Specification v3.3.0
|
||||||
PROMPT ================================================================
|
PROMPT ==============================================================
|
||||||
@@03_MARS_828_install_CT_MRDS_FILE_ARCHIVER_SPEC.sql
|
@@03_MARS_828_install_CT_MRDS_FILE_ARCHIVER_SPEC.sql
|
||||||
|
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT Step 6/10: Deploying FILE_ARCHIVER Package Body v3.3.0
|
PROMPT Step 8/14: Deploying FILE_ARCHIVER Package Body v3.3.0
|
||||||
PROMPT ======================================================
|
PROMPT ====================================================
|
||||||
@@04_MARS_828_install_CT_MRDS_FILE_ARCHIVER_BODY.sql
|
@@04_MARS_828_install_CT_MRDS_FILE_ARCHIVER_BODY.sql
|
||||||
|
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT Step 7/10: Verifying installation
|
PROMPT Step 9/14: Deploying FILE_MANAGER Package Specification v3.3.2
|
||||||
PROMPT =================================
|
PROMPT =============================================================
|
||||||
|
@@09_MARS_828_install_CT_MRDS_FILE_MANAGER_SPEC.sql
|
||||||
|
|
||||||
|
PROMPT
|
||||||
|
PROMPT Step 10/14: Deploying FILE_MANAGER Package Body v3.3.2
|
||||||
|
PROMPT ===================================================
|
||||||
|
@@10_MARS_828_install_CT_MRDS_FILE_MANAGER_BODY.sql
|
||||||
|
|
||||||
|
PROMPT
|
||||||
|
PROMPT Step 11/14: Verifying installation
|
||||||
|
PROMPT ==================================
|
||||||
@@05_MARS_828_verify_installation.sql
|
@@05_MARS_828_verify_installation.sql
|
||||||
|
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT Step 8/10: Tracking package versions
|
PROMPT Step 12/14: Tracking package versions
|
||||||
PROMPT ====================================
|
PROMPT =====================================
|
||||||
@@track_package_versions.sql
|
@@track_package_versions.sql
|
||||||
|
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT Step 9/10: Verifying tracked packages
|
PROMPT Step 13/14: Verifying tracked packages
|
||||||
PROMPT =====================================
|
PROMPT ======================================
|
||||||
@@verify_packages_version.sql
|
@@verify_packages_version.sql
|
||||||
|
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT Step 10/10: Configuring Release 01 tables archival strategies
|
PROMPT Step 14/14: Configuring Release 01 tables archival strategies
|
||||||
PROMPT ============================================================
|
PROMPT ============================================================
|
||||||
@@06_MARS_828_configure_release01_tables.sql
|
@@06_MARS_828_configure_release01_tables.sql
|
||||||
|
|
||||||
@@ -113,12 +133,13 @@ PROMPT Completion Time:
|
|||||||
SELECT TO_CHAR(SYSDATE, 'YYYY-MM-DD HH24:MI:SS') AS install_end FROM DUAL;
|
SELECT TO_CHAR(SYSDATE, 'YYYY-MM-DD HH24:MI:SS') AS install_end FROM DUAL;
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT Installation Summary:
|
PROMPT Installation Summary:
|
||||||
PROMPT - Package: CT_MRDS.FILE_ARCHIVER
|
PROMPT - Packages Installed:
|
||||||
PROMPT - Version: 3.3.0 (includes selective archiving and config-based TRASH policy)
|
PROMPT * CT_MRDS.FILE_ARCHIVER v3.3.0 (includes selective archiving and config-based TRASH policy)
|
||||||
|
PROMPT * CT_MRDS.FILE_MANAGER v3.3.2 (compatible with MARS-828 threshold column renames)
|
||||||
PROMPT - Strategies: THRESHOLD_BASED (default), MINIMUM_AGE_MONTHS (0=current month), HYBRID
|
PROMPT - Strategies: THRESHOLD_BASED (default), MINIMUM_AGE_MONTHS (0=current month), HYBRID
|
||||||
PROMPT - Selective Archiving: ARCHIVE_ENABLED column (Y=archive, N=skip)
|
PROMPT - Selective Archiving: IS_ARCHIVE_ENABLED column (Y=archive, N=skip)
|
||||||
PROMPT - TRASH Policy: KEEP_IN_TRASH column (Y=keep files, N=delete immediately)
|
PROMPT - TRASH Policy: IS_KEEP_IN_TRASH column (Y=keep files, N=delete immediately)
|
||||||
PROMPT * Default: ARCHIVE_ENABLED='Y', KEEP_IN_TRASH='N' (archiving enabled, immediate deletion)
|
PROMPT * Default: IS_ARCHIVE_ENABLED='Y', IS_KEEP_IN_TRASH='N' (archiving enabled, immediate deletion)
|
||||||
PROMPT * TRASH is a subfolder in DATA bucket (e.g., TRASH/LM/TABLE_NAME)
|
PROMPT * TRASH is a subfolder in DATA bucket (e.g., TRASH/LM/TABLE_NAME)
|
||||||
PROMPT * No more pKeepInTrash parameter - policy from config only
|
PROMPT * No more pKeepInTrash parameter - policy from config only
|
||||||
PROMPT - New Procedure: ARCHIVE_ALL_FOR_SOURCE(pSourceKey) for batch processing
|
PROMPT - New Procedure: ARCHIVE_ALL_FOR_SOURCE(pSourceKey) for batch processing
|
||||||
|
|||||||
@@ -16,20 +16,20 @@ CREATE TABLE CT_MRDS.A_SOURCE_FILE_CONFIG (
|
|||||||
TABLE_ID VARCHAR2(200),
|
TABLE_ID VARCHAR2(200),
|
||||||
TEMPLATE_TABLE_NAME VARCHAR2(200),
|
TEMPLATE_TABLE_NAME VARCHAR2(200),
|
||||||
CONTAINER_FILE_KEY NUMBER(38,0),
|
CONTAINER_FILE_KEY NUMBER(38,0),
|
||||||
DAYS_FOR_ARCHIVE_THRESHOLD NUMBER(4,0),
|
ARCHIVE_THRESHOLD_DAYS NUMBER(4,0),
|
||||||
FILES_COUNT_OVER_ARCHIVE_THRESHOLD NUMBER(38,0),
|
ARCHIVE_THRESHOLD_FILES_COUNT NUMBER(38,0),
|
||||||
BYTES_SUM_OVER_ARCHIVE_THRESHOLD NUMBER(38,0),
|
ARCHIVE_THRESHOLD_BYTES_SUM NUMBER(38,0),
|
||||||
ODS_SCHEMA_NAME VARCHAR2(100),
|
ODS_SCHEMA_NAME VARCHAR2(100),
|
||||||
ROWS_COUNT_OVER_ARCHIVE_THRESHOLD NUMBER(38,0),
|
ARCHIVE_THRESHOLD_ROWS_COUNT NUMBER(38,0),
|
||||||
HOURS_TO_EXPIRE_STATISTICS NUMBER(38,3),
|
HOURS_TO_EXPIRE_STATISTICS NUMBER(38,3),
|
||||||
ARCHIVAL_STRATEGY VARCHAR2(50),
|
ARCHIVAL_STRATEGY VARCHAR2(50),
|
||||||
MINIMUM_AGE_MONTHS NUMBER(3,0),
|
MINIMUM_AGE_MONTHS NUMBER(3,0),
|
||||||
ENCODING VARCHAR2(50) DEFAULT 'UTF8',
|
ENCODING VARCHAR2(50) DEFAULT 'UTF8',
|
||||||
ARCHIVE_ENABLED CHAR(1) DEFAULT 'N' NOT NULL,
|
IS_ARCHIVE_ENABLED CHAR(1) DEFAULT 'N' NOT NULL,
|
||||||
KEEP_IN_TRASH CHAR(1) DEFAULT 'N' NOT NULL,
|
IS_KEEP_IN_TRASH CHAR(1) DEFAULT 'N' NOT NULL,
|
||||||
CONSTRAINT A_SOURCE_FILE_CONFIG_PK PRIMARY KEY (A_SOURCE_FILE_CONFIG_KEY),
|
CONSTRAINT A_SOURCE_FILE_CONFIG_PK PRIMARY KEY (A_SOURCE_FILE_CONFIG_KEY),
|
||||||
CONSTRAINT CHK_ARCHIVE_ENABLED CHECK (ARCHIVE_ENABLED IN ('Y', 'N')),
|
CONSTRAINT CHK_IS_ARCHIVE_ENABLED CHECK (IS_ARCHIVE_ENABLED IN ('Y', 'N')),
|
||||||
CONSTRAINT CHK_KEEP_IN_TRASH CHECK (KEEP_IN_TRASH IN ('Y', 'N')),
|
CONSTRAINT CHK_IS_KEEP_IN_TRASH CHECK (IS_KEEP_IN_TRASH IN ('Y', 'N')),
|
||||||
CONSTRAINT SOURCE_FILE_TYPE_CHK CHECK (SOURCE_FILE_TYPE IN ('INPUT', 'CONTAINER', 'LOAD_CONFIG')),
|
CONSTRAINT SOURCE_FILE_TYPE_CHK CHECK (SOURCE_FILE_TYPE IN ('INPUT', 'CONTAINER', 'LOAD_CONFIG')),
|
||||||
CONSTRAINT ASFC_A_SOURCE_KEY_FK FOREIGN KEY(A_SOURCE_KEY) REFERENCES CT_MRDS.A_SOURCE(A_SOURCE_KEY),
|
CONSTRAINT ASFC_A_SOURCE_KEY_FK FOREIGN KEY(A_SOURCE_KEY) REFERENCES CT_MRDS.A_SOURCE(A_SOURCE_KEY),
|
||||||
CONSTRAINT ASFC_CONTAINER_FILE_KEY_FK FOREIGN KEY(CONTAINER_FILE_KEY) REFERENCES CT_MRDS.A_SOURCE_FILE_CONFIG(A_SOURCE_FILE_CONFIG_KEY),
|
CONSTRAINT ASFC_CONTAINER_FILE_KEY_FK FOREIGN KEY(CONTAINER_FILE_KEY) REFERENCES CT_MRDS.A_SOURCE_FILE_CONFIG(A_SOURCE_FILE_CONFIG_KEY),
|
||||||
@@ -47,10 +47,64 @@ ON "CT_MRDS"."A_SOURCE_FILE_CONFIG" ("SOURCE_FILE_TYPE", "SOURCE_FILE_ID", "TABL
|
|||||||
TABLESPACE "DATA";
|
TABLESPACE "DATA";
|
||||||
|
|
||||||
-- Column comments
|
-- Column comments
|
||||||
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ARCHIVAL_STRATEGY IS 'Archival strategy: THRESHOLD_BASED, CURRENT_MONTH_ONLY, MINIMUM_AGE_MONTHS, HYBRID. Added in MARS-828';
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.A_SOURCE_FILE_CONFIG_KEY IS
|
||||||
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.MINIMUM_AGE_MONTHS IS 'Minimum age in months before archival (required for MINIMUM_AGE_MONTHS strategy). Added in MARS-828';
|
'Primary key - unique identifier for source file configuration record';
|
||||||
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ENCODING IS 'Oracle character set name for CSV files (e.g., UTF8, WE8MSWIN1252, EE8ISO8859P2). Added in MARS-1049';
|
|
||||||
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ARCHIVE_ENABLED IS 'Y=Enable archiving, N=Skip archiving. Controls if table participates in archival process. Added in MARS-828 v3.3.0';
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.A_SOURCE_KEY IS
|
||||||
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.KEEP_IN_TRASH IS 'Y=Keep files in TRASH after archiving, N=Delete immediately. Controls TRASH retention policy. Added in MARS-828 v3.3.0';
|
'Foreign key to A_SOURCE table - identifies the source system (e.g., LM, C2D, CSDB)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.SOURCE_FILE_TYPE IS
|
||||||
|
'Type of file configuration: INPUT (data files), CONTAINER (xml files), or LOAD_CONFIG (configuration files)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.SOURCE_FILE_ID IS
|
||||||
|
'Unique identifier for the source file within the source system (e.g., UC_DISSEM, STANDING_FACILITIES)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.SOURCE_FILE_DESC IS
|
||||||
|
'Human-readable description of the source file and its purpose';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.SOURCE_FILE_NAME_PATTERN IS
|
||||||
|
'Filename pattern for matching incoming files (supports wildcards, e.g., UC_NMA_DISSEM-*.csv)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.TABLE_ID IS
|
||||||
|
'Identifier for the target table where data will be loaded (without schema prefix)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.TEMPLATE_TABLE_NAME IS
|
||||||
|
'Fully qualified name of template table in CT_ET_TEMPLATES schema used for external table creation';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.CONTAINER_FILE_KEY IS
|
||||||
|
'Foreign key to parent container configuration when this file is part of an xml (NULL for standalone files)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ARCHIVE_THRESHOLD_DAYS IS
|
||||||
|
'Threshold for THRESHOLD_BASED strategy: archive data older than N days';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ARCHIVE_THRESHOLD_FILES_COUNT IS
|
||||||
|
'Trigger archival when file count exceeds this threshold (used in THRESHOLD_BASED and HYBRID strategies)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ARCHIVE_THRESHOLD_BYTES_SUM IS
|
||||||
|
'Trigger archival when total size in bytes exceeds this threshold (used in THRESHOLD_BASED and HYBRID strategies)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ARCHIVE_THRESHOLD_ROWS_COUNT IS
|
||||||
|
'Trigger archival when total row count exceeds this threshold (used in THRESHOLD_BASED and HYBRID strategies)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ODS_SCHEMA_NAME IS
|
||||||
|
'Schema name where ODS external tables are created (typically ODS)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.HOURS_TO_EXPIRE_STATISTICS IS
|
||||||
|
'Number of hours before table statistics expire and need to be recalculated';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ARCHIVAL_STRATEGY IS
|
||||||
|
'Archival strategy: THRESHOLD_BASED (days-based), MINIMUM_AGE_MONTHS (0=current month, N=retain N months), HYBRID (combination)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.MINIMUM_AGE_MONTHS IS
|
||||||
|
'Minimum age in months before archival (required for MINIMUM_AGE_MONTHS and HYBRID strategies, 0=current month only)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.ENCODING IS
|
||||||
|
'Oracle character set name for CSV files (e.g., UTF8, WE8MSWIN1252, EE8ISO8859P2)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.IS_ARCHIVE_ENABLED IS
|
||||||
|
'Y=Enable archiving, N=Skip archiving. Controls if table participates in archival process';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_CONFIG.IS_KEEP_IN_TRASH IS
|
||||||
|
'Y=Keep files in TRASH after archiving, N=Delete immediately. Controls TRASH retention policy';
|
||||||
|
|
||||||
GRANT SELECT, INSERT, UPDATE, DELETE ON CT_MRDS.A_SOURCE_FILE_CONFIG TO MRDS_LOADER_ROLE;
|
GRANT SELECT, INSERT, UPDATE, DELETE ON CT_MRDS.A_SOURCE_FILE_CONFIG TO MRDS_LOADER_ROLE;
|
||||||
@@ -26,4 +26,41 @@ CREATE TABLE CT_MRDS.A_SOURCE_FILE_RECEIVED (
|
|||||||
CREATE UNIQUE INDEX CT_MRDS.A_SOURCE_FILE_RECEIVED_UK1
|
CREATE UNIQUE INDEX CT_MRDS.A_SOURCE_FILE_RECEIVED_UK1
|
||||||
ON CT_MRDS.A_SOURCE_FILE_RECEIVED(CHECKSUM, CREATED, BYTES);
|
ON CT_MRDS.A_SOURCE_FILE_RECEIVED(CHECKSUM, CREATED, BYTES);
|
||||||
|
|
||||||
|
-- Column comments
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY IS
|
||||||
|
'Primary key - unique identifier for received file record';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_CONFIG_KEY IS
|
||||||
|
'Foreign key to A_SOURCE_FILE_CONFIG - links file to its configuration and processing rules';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.SOURCE_FILE_NAME IS
|
||||||
|
'Full object name/path of the received file in OCI Object Storage (includes INBOX prefix)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.CHECKSUM IS
|
||||||
|
'MD5 checksum of file content for integrity verification and duplicate detection';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.CREATED IS
|
||||||
|
'Timestamp with timezone when file was created/uploaded to Object Storage (from DBMS_CLOUD.LIST_OBJECTS)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.BYTES IS
|
||||||
|
'File size in bytes';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.RECEPTION_DATE IS
|
||||||
|
'Date when file was registered in the system (extracted from CREATED timestamp)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.PROCESSING_STATUS IS
|
||||||
|
'Current processing status: RECEIVED → VALIDATED → READY_FOR_INGESTION → INGESTED → ARCHIVED_AND_TRASHED → ARCHIVED_AND_PURGED';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.EXTERNAL_TABLE_NAME IS
|
||||||
|
'Name of temporary external table created for file validation (dropped after validation)';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.PARTITION_YEAR IS
|
||||||
|
'Year partition value (YYYY format) when file was archived to ARCHIVE bucket with Hive-style partitioning';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.PARTITION_MONTH IS
|
||||||
|
'Month partition value (MM format) when file was archived to ARCHIVE bucket with Hive-style partitioning';
|
||||||
|
|
||||||
|
COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.ARCH_FILE_NAME IS
|
||||||
|
'Archive directory prefix in ARCHIVE bucket containing archived Parquet files (supports multiple files from parallel DBMS_CLOUD.EXPORT_DATA)';
|
||||||
|
|
||||||
GRANT SELECT, INSERT, UPDATE, DELETE ON CT_MRDS.A_SOURCE_FILE_RECEIVED TO MRDS_LOADER_ROLE;
|
GRANT SELECT, INSERT, UPDATE, DELETE ON CT_MRDS.A_SOURCE_FILE_RECEIVED TO MRDS_LOADER_ROLE;
|
||||||
@@ -21,7 +21,7 @@ AS
|
|||||||
CASE pSourceFileConfig.ARCHIVAL_STRATEGY
|
CASE pSourceFileConfig.ARCHIVAL_STRATEGY
|
||||||
-- Legacy threshold-based strategy (backward compatible)
|
-- Legacy threshold-based strategy (backward compatible)
|
||||||
WHEN 'THRESHOLD_BASED' THEN
|
WHEN 'THRESHOLD_BASED' THEN
|
||||||
vWhereClause := 'extract(day from (systimestamp - workflow_start)) > ' || pSourceFileConfig.DAYS_FOR_ARCHIVE_THRESHOLD;
|
vWhereClause := 'extract(day from (systimestamp - workflow_start)) > ' || pSourceFileConfig.ARCHIVE_THRESHOLD_DAYS;
|
||||||
|
|
||||||
-- Archive data older than X months (0 = current month only)
|
-- Archive data older than X months (0 = current month only)
|
||||||
WHEN 'MINIMUM_AGE_MONTHS' THEN
|
WHEN 'MINIMUM_AGE_MONTHS' THEN
|
||||||
@@ -113,15 +113,15 @@ AS
|
|||||||
vSourceFileConfig := CT_MRDS.FILE_MANAGER.GET_SOURCE_FILE_CONFIG(pSourceFileConfigKey => pSourceFileConfigKey);
|
vSourceFileConfig := CT_MRDS.FILE_MANAGER.GET_SOURCE_FILE_CONFIG(pSourceFileConfigKey => pSourceFileConfigKey);
|
||||||
|
|
||||||
-- Check if archiving is enabled for this configuration
|
-- Check if archiving is enabled for this configuration
|
||||||
IF vSourceFileConfig.ARCHIVE_ENABLED = 'N' THEN
|
IF vSourceFileConfig.IS_ARCHIVE_ENABLED = 'N' THEN
|
||||||
CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT('Archiving disabled for this configuration (ARCHIVE_ENABLED=N). Skipping.', 'WARNING', vParameters);
|
CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT('Archiving disabled for this configuration (IS_ARCHIVE_ENABLED=N). Skipping.', 'WARNING', vParameters);
|
||||||
CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT('End','INFO',vParameters);
|
CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT('End','INFO',vParameters);
|
||||||
RETURN;
|
RETURN;
|
||||||
END IF;
|
END IF;
|
||||||
|
|
||||||
-- Get TRASH policy from configuration
|
-- Get TRASH policy from configuration
|
||||||
vKeepInTrash := (vSourceFileConfig.KEEP_IN_TRASH = 'Y');
|
vKeepInTrash := (vSourceFileConfig.IS_KEEP_IN_TRASH = 'Y');
|
||||||
CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT('TRASH policy from config: KEEP_IN_TRASH=' || vSourceFileConfig.KEEP_IN_TRASH, 'INFO', vParameters);
|
CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT('TRASH policy from config: IS_KEEP_IN_TRASH=' || vSourceFileConfig.IS_KEEP_IN_TRASH, 'INFO', vParameters);
|
||||||
|
|
||||||
vTableStat := GET_TABLE_STAT(pSourceFileConfigKey => pSourceFileConfigKey);
|
vTableStat := GET_TABLE_STAT(pSourceFileConfigKey => pSourceFileConfigKey);
|
||||||
|
|
||||||
@@ -142,9 +142,9 @@ AS
|
|||||||
CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT('Archival strategy: MINIMUM_AGE_MONTHS (threshold-independent)','INFO');
|
CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT('Archival strategy: MINIMUM_AGE_MONTHS (threshold-independent)','INFO');
|
||||||
ELSE
|
ELSE
|
||||||
-- THRESHOLD_BASED and HYBRID: Check thresholds
|
-- THRESHOLD_BASED and HYBRID: Check thresholds
|
||||||
if vTableStat.OVER_ARCH_THRESOLD_FILE_COUNT >= vSourceFileConfig.FILES_COUNT_OVER_ARCHIVE_THRESHOLD then vArchivalTriggeredBy := 'FILES_COUNT';
|
if vTableStat.OVER_ARCH_THRESOLD_FILE_COUNT >= vSourceFileConfig.ARCHIVE_THRESHOLD_FILES_COUNT then vArchivalTriggeredBy := 'FILES_COUNT';
|
||||||
elsif vTableStat.OVER_ARCH_THRESOLD_ROW_COUNT >= vSourceFileConfig.ROWS_COUNT_OVER_ARCHIVE_THRESHOLD then vArchivalTriggeredBy := vArchivalTriggeredBy||', ROWS_COUNT';
|
elsif vTableStat.OVER_ARCH_THRESOLD_ROW_COUNT >= vSourceFileConfig.ARCHIVE_THRESHOLD_ROWS_COUNT then vArchivalTriggeredBy := vArchivalTriggeredBy||', ROWS_COUNT';
|
||||||
elsif vTableStat.OVER_ARCH_THRESOLD_SIZE >= vSourceFileConfig.BYTES_SUM_OVER_ARCHIVE_THRESHOLD then vArchivalTriggeredBy := vArchivalTriggeredBy||', BYTES_SUM';
|
elsif vTableStat.OVER_ARCH_THRESOLD_SIZE >= vSourceFileConfig.ARCHIVE_THRESHOLD_BYTES_SUM then vArchivalTriggeredBy := vArchivalTriggeredBy||', BYTES_SUM';
|
||||||
else CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT('Non of archival triggers reached','INFO');
|
else CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT('Non of archival triggers reached','INFO');
|
||||||
end if;
|
end if;
|
||||||
END IF;
|
END IF;
|
||||||
@@ -166,6 +166,7 @@ AS
|
|||||||
join CT_MRDS.a_workflow_history h
|
join CT_MRDS.a_workflow_history h
|
||||||
on s.a_workflow_history_key = h.a_workflow_history_key
|
on s.a_workflow_history_key = h.a_workflow_history_key
|
||||||
where ' || GET_ARCHIVAL_WHERE_CLAUSE(vSourceFileConfig) || '
|
where ' || GET_ARCHIVAL_WHERE_CLAUSE(vSourceFileConfig) || '
|
||||||
|
and h.WORKFLOW_SUCCESSFUL = ''Y''
|
||||||
group by file$name, file$path, to_char(h.workflow_start,''yyyy''), to_char(h.workflow_start,''mm'')'
|
group by file$name, file$path, to_char(h.workflow_start,''yyyy''), to_char(h.workflow_start,''mm'')'
|
||||||
;
|
;
|
||||||
|
|
||||||
@@ -182,11 +183,11 @@ AS
|
|||||||
join CT_MRDS.A_SOURCE_FILE_RECEIVED r
|
join CT_MRDS.A_SOURCE_FILE_RECEIVED r
|
||||||
on s.file$name = r.source_file_name
|
on s.file$name = r.source_file_name
|
||||||
and r.a_source_file_config_key = '||pSourceFileConfigKey||'
|
and r.a_source_file_config_key = '||pSourceFileConfigKey||'
|
||||||
and r.PROCESSING_STATUS = ''INGESTED''
|
|
||||||
join CT_MRDS.a_workflow_history h
|
join CT_MRDS.a_workflow_history h
|
||||||
on s.a_workflow_history_key = h.a_workflow_history_key
|
on s.a_workflow_history_key = h.a_workflow_history_key
|
||||||
and to_char(h.workflow_start,''yyyy'') = '''||ym_loop.year||'''
|
and to_char(h.workflow_start,''yyyy'') = '''||ym_loop.year||'''
|
||||||
and to_char(h.workflow_start,''mm'') = '''||ym_loop.month||'''
|
and to_char(h.workflow_start,''mm'') = '''||ym_loop.month||'''
|
||||||
|
and h.WORKFLOW_SUCCESSFUL = ''Y''
|
||||||
'
|
'
|
||||||
;
|
;
|
||||||
vUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE')||'ARCHIVE/'||vSourceFileConfig.A_SOURCE_KEY||'/'||vSourceFileConfig.TABLE_ID||'/PARTITION_YEAR='||ym_loop.year||'/PARTITION_MONTH='||ym_loop.month||'/';
|
vUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE')||'ARCHIVE/'||vSourceFileConfig.A_SOURCE_KEY||'/'||vSourceFileConfig.TABLE_ID||'/PARTITION_YEAR='||ym_loop.year||'/PARTITION_MONTH='||ym_loop.month||'/';
|
||||||
@@ -296,10 +297,10 @@ AS
|
|||||||
AND r.source_file_name = f.filename
|
AND r.source_file_name = f.filename
|
||||||
AND r.PROCESSING_STATUS = 'ARCHIVED_AND_TRASHED';
|
AND r.PROCESSING_STATUS = 'ARCHIVED_AND_TRASHED';
|
||||||
END LOOP;
|
END LOOP;
|
||||||
CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT('All archived files removed from TRASH folder and marked as ARCHIVED_AND_PURGED (config: KEEP_IN_TRASH=N).','INFO');
|
CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT('All archived files removed from TRASH folder and marked as ARCHIVED_AND_PURGED (config: IS_KEEP_IN_TRASH=N).','INFO');
|
||||||
ELSE
|
ELSE
|
||||||
-- Keep files in TRASH folder (status remains ARCHIVED_AND_TRASHED)
|
-- Keep files in TRASH folder (status remains ARCHIVED_AND_TRASHED)
|
||||||
CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT('Archived files kept in TRASH folder for retention (config: KEEP_IN_TRASH=Y, status: ARCHIVED_AND_TRASHED).','INFO');
|
CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT('Archived files kept in TRASH folder for retention (config: IS_KEEP_IN_TRASH=Y, status: ARCHIVED_AND_TRASHED).','INFO');
|
||||||
END IF;
|
END IF;
|
||||||
|
|
||||||
--ROLLBACK PART
|
--ROLLBACK PART
|
||||||
@@ -483,7 +484,7 @@ AS
|
|||||||
,sum(case when ' || vWhereClause || ' then row_count_per_file else 0 end) as OLD_ROW_COUNT
|
,sum(case when ' || vWhereClause || ' then row_count_per_file else 0 end) as OLD_ROW_COUNT
|
||||||
,sum(r.bytes) as BYTES
|
,sum(r.bytes) as BYTES
|
||||||
,sum(case when ' || vWhereClause || ' then r.bytes else 0 end) as OLD_BYTES
|
,sum(case when ' || vWhereClause || ' then r.bytes else 0 end) as OLD_BYTES
|
||||||
,'||COALESCE(TO_CHAR(vSourceFileConfig.DAYS_FOR_ARCHIVE_THRESHOLD), 'NULL')||' as DAYS_FOR_ARCHIVE_THRESHOLD
|
,'||COALESCE(TO_CHAR(vSourceFileConfig.ARCHIVE_THRESHOLD_DAYS), 'NULL')||' as ARCHIVE_THRESHOLD_DAYS
|
||||||
,systimestamp as CREATED
|
,systimestamp as CREATED
|
||||||
from tmp_gr t
|
from tmp_gr t
|
||||||
join (SELECT * from DBMS_CLOUD.LIST_OBJECTS(
|
join (SELECT * from DBMS_CLOUD.LIST_OBJECTS(
|
||||||
@@ -1041,8 +1042,8 @@ AS
|
|||||||
SELECT
|
SELECT
|
||||||
A_SOURCE_FILE_CONFIG_KEY,
|
A_SOURCE_FILE_CONFIG_KEY,
|
||||||
TABLE_ID,
|
TABLE_ID,
|
||||||
ARCHIVE_ENABLED,
|
IS_ARCHIVE_ENABLED,
|
||||||
KEEP_IN_TRASH,
|
IS_KEEP_IN_TRASH,
|
||||||
A_SOURCE_KEY
|
A_SOURCE_KEY
|
||||||
FROM CT_MRDS.A_SOURCE_FILE_CONFIG
|
FROM CT_MRDS.A_SOURCE_FILE_CONFIG
|
||||||
WHERE SOURCE_FILE_TYPE = 'INPUT'
|
WHERE SOURCE_FILE_TYPE = 'INPUT'
|
||||||
@@ -1058,16 +1059,16 @@ AS
|
|||||||
)
|
)
|
||||||
ORDER BY A_SOURCE_KEY, A_SOURCE_FILE_CONFIG_KEY
|
ORDER BY A_SOURCE_KEY, A_SOURCE_FILE_CONFIG_KEY
|
||||||
) LOOP
|
) LOOP
|
||||||
IF config_rec.ARCHIVE_ENABLED = 'N' THEN
|
IF config_rec.IS_ARCHIVE_ENABLED = 'N' THEN
|
||||||
CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT(
|
CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT(
|
||||||
'Skipping table ' || config_rec.TABLE_ID || ' (ARCHIVE_ENABLED=N) [Source: ' || config_rec.A_SOURCE_KEY || ', Config: ' || config_rec.A_SOURCE_FILE_CONFIG_KEY || ']',
|
'Skipping table ' || config_rec.TABLE_ID || ' (IS_ARCHIVE_ENABLED=N) [Source: ' || config_rec.A_SOURCE_KEY || ', Config: ' || config_rec.A_SOURCE_FILE_CONFIG_KEY || ']',
|
||||||
'INFO'
|
'INFO'
|
||||||
);
|
);
|
||||||
vTablesSkipped := vTablesSkipped + 1;
|
vTablesSkipped := vTablesSkipped + 1;
|
||||||
ELSE
|
ELSE
|
||||||
BEGIN
|
BEGIN
|
||||||
CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT(
|
CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT(
|
||||||
'Archiving table ' || config_rec.TABLE_ID || ' [Source: ' || config_rec.A_SOURCE_KEY || ', Config: ' || config_rec.A_SOURCE_FILE_CONFIG_KEY || ', KEEP_IN_TRASH=' || config_rec.KEEP_IN_TRASH || ']',
|
'Archiving table ' || config_rec.TABLE_ID || ' [Source: ' || config_rec.A_SOURCE_KEY || ', Config: ' || config_rec.A_SOURCE_FILE_CONFIG_KEY || ', IS_KEEP_IN_TRASH=' || config_rec.IS_KEEP_IN_TRASH || ']',
|
||||||
'INFO'
|
'INFO'
|
||||||
);
|
);
|
||||||
|
|
||||||
@@ -1174,14 +1175,14 @@ AS
|
|||||||
END IF;
|
END IF;
|
||||||
|
|
||||||
-- Set enabled filter info
|
-- Set enabled filter info
|
||||||
vEnabledFilter := CASE WHEN pOnlyEnabled THEN 'ARCHIVE_ENABLED=Y only' ELSE 'All tables' END;
|
vEnabledFilter := CASE WHEN pOnlyEnabled THEN 'IS_ARCHIVE_ENABLED=Y only' ELSE 'All tables' END;
|
||||||
CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT('Filter mode: ' || vEnabledFilter, 'INFO');
|
CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT('Filter mode: ' || vEnabledFilter, 'INFO');
|
||||||
|
|
||||||
FOR config_rec IN (
|
FOR config_rec IN (
|
||||||
SELECT
|
SELECT
|
||||||
A_SOURCE_FILE_CONFIG_KEY,
|
A_SOURCE_FILE_CONFIG_KEY,
|
||||||
TABLE_ID,
|
TABLE_ID,
|
||||||
ARCHIVE_ENABLED,
|
IS_ARCHIVE_ENABLED,
|
||||||
A_SOURCE_KEY
|
A_SOURCE_KEY
|
||||||
FROM CT_MRDS.A_SOURCE_FILE_CONFIG
|
FROM CT_MRDS.A_SOURCE_FILE_CONFIG
|
||||||
WHERE SOURCE_FILE_TYPE = 'INPUT'
|
WHERE SOURCE_FILE_TYPE = 'INPUT'
|
||||||
@@ -1195,20 +1196,20 @@ AS
|
|||||||
-- Level 3: All configs when pGatherAll = TRUE
|
-- Level 3: All configs when pGatherAll = TRUE
|
||||||
(pSourceFileConfigKey IS NULL AND pSourceKey IS NULL AND pGatherAll = TRUE)
|
(pSourceFileConfigKey IS NULL AND pSourceKey IS NULL AND pGatherAll = TRUE)
|
||||||
)
|
)
|
||||||
-- Apply ARCHIVE_ENABLED filter if pOnlyEnabled = TRUE
|
-- Apply IS_ARCHIVE_ENABLED filter if pOnlyEnabled = TRUE
|
||||||
AND (pOnlyEnabled = FALSE OR ARCHIVE_ENABLED = 'Y')
|
AND (pOnlyEnabled = FALSE OR IS_ARCHIVE_ENABLED = 'Y')
|
||||||
ORDER BY A_SOURCE_KEY, A_SOURCE_FILE_CONFIG_KEY
|
ORDER BY A_SOURCE_KEY, A_SOURCE_FILE_CONFIG_KEY
|
||||||
) LOOP
|
) LOOP
|
||||||
IF pOnlyEnabled AND config_rec.ARCHIVE_ENABLED = 'N' THEN
|
IF pOnlyEnabled AND config_rec.IS_ARCHIVE_ENABLED = 'N' THEN
|
||||||
CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT(
|
CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT(
|
||||||
'Skipping table ' || config_rec.TABLE_ID || ' (ARCHIVE_ENABLED=N) [Source: ' || config_rec.A_SOURCE_KEY || ', Config: ' || config_rec.A_SOURCE_FILE_CONFIG_KEY || ']',
|
'Skipping table ' || config_rec.TABLE_ID || ' (IS_ARCHIVE_ENABLED=N) [Source: ' || config_rec.A_SOURCE_KEY || ', Config: ' || config_rec.A_SOURCE_FILE_CONFIG_KEY || ']',
|
||||||
'INFO'
|
'INFO'
|
||||||
);
|
);
|
||||||
vTablesSkipped := vTablesSkipped + 1;
|
vTablesSkipped := vTablesSkipped + 1;
|
||||||
ELSE
|
ELSE
|
||||||
BEGIN
|
BEGIN
|
||||||
CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT(
|
CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT(
|
||||||
'Gathering statistics for table ' || config_rec.TABLE_ID || ' [Source: ' || config_rec.A_SOURCE_KEY || ', Config: ' || config_rec.A_SOURCE_FILE_CONFIG_KEY || ', ARCHIVE_ENABLED=' || config_rec.ARCHIVE_ENABLED || ']',
|
'Gathering statistics for table ' || config_rec.TABLE_ID || ' [Source: ' || config_rec.A_SOURCE_KEY || ', Config: ' || config_rec.A_SOURCE_FILE_CONFIG_KEY || ', IS_ARCHIVE_ENABLED=' || config_rec.IS_ARCHIVE_ENABLED || ']',
|
||||||
'INFO'
|
'INFO'
|
||||||
);
|
);
|
||||||
|
|
||||||
|
|||||||
@@ -23,7 +23,7 @@ AS
|
|||||||
|
|
||||||
-- Version History (Latest changes first)
|
-- Version History (Latest changes first)
|
||||||
VERSION_HISTORY CONSTANT VARCHAR2(4000) :=
|
VERSION_HISTORY CONSTANT VARCHAR2(4000) :=
|
||||||
'3.3.0 (2026-02-11): Added ARCHIVE_ENABLED and KEEP_IN_TRASH columns to A_SOURCE_FILE_CONFIG for selective archiving and config-based TRASH policy. Removed pKeepInTrash parameter (now from config). Added ARCHIVE_ALL batch procedure with 3-level granularity (config/source/all). Added GATHER_TABLE_STAT_ALL batch statistics procedure with 3-level granularity. Added RESTORE_FILE_FROM_TRASH and PURGE_TRASH_FOLDER with 3-level granularity' || CHR(13)||CHR(10) ||
|
'3.3.0 (2026-02-11): Added IS_ARCHIVE_ENABLED and IS_KEEP_IN_TRASH columns to A_SOURCE_FILE_CONFIG for selective archiving and config-based TRASH policy. Removed pKeepInTrash parameter (now from config). Added ARCHIVE_ALL batch procedure with 3-level granularity (config/source/all). Added GATHER_TABLE_STAT_ALL batch statistics procedure with 3-level granularity. Added RESTORE_FILE_FROM_TRASH and PURGE_TRASH_FOLDER with 3-level granularity' || CHR(13)||CHR(10) ||
|
||||||
'3.2.1 (2026-02-10): Fixed status update - ARCHIVED → ARCHIVED_AND_TRASHED when moving files to TRASH folder (critical bug fix)' || CHR(13)||CHR(10) ||
|
'3.2.1 (2026-02-10): Fixed status update - ARCHIVED → ARCHIVED_AND_TRASHED when moving files to TRASH folder (critical bug fix)' || CHR(13)||CHR(10) ||
|
||||||
'3.2.0 (2026-02-06): Added pKeepInTrash parameter (DEFAULT TRUE) to ARCHIVE_TABLE_DATA for TRASH folder retention control - files kept in TRASH subfolder (DATA bucket) by default for safety and compliance' || CHR(13)||CHR(10) ||
|
'3.2.0 (2026-02-06): Added pKeepInTrash parameter (DEFAULT TRUE) to ARCHIVE_TABLE_DATA for TRASH folder retention control - files kept in TRASH subfolder (DATA bucket) by default for safety and compliance' || CHR(13)||CHR(10) ||
|
||||||
'3.1.2 (2026-02-06): Fixed missing PARTITION_YEAR/PARTITION_MONTH assignments in UPDATE statement and export query circular dependency (now filters by workflow_start instead of partition fields)' || CHR(13)||CHR(10) ||
|
'3.1.2 (2026-02-06): Fixed missing PARTITION_YEAR/PARTITION_MONTH assignments in UPDATE statement and export query circular dependency (now filters by workflow_start instead of partition fields)' || CHR(13)||CHR(10) ||
|
||||||
@@ -51,7 +51,7 @@ AS
|
|||||||
* @desc Wrapper procedure for DBMS_CLOUD.EXPORT_DATA.
|
* @desc Wrapper procedure for DBMS_CLOUD.EXPORT_DATA.
|
||||||
* Exports data from table specified by pSourceFileConfigKey(A_SOURCE_FILE_CONFIG.A_SOURCE_FILE_CONFIG_KEY) into PARQUET file on OCI infrustructure.
|
* Exports data from table specified by pSourceFileConfigKey(A_SOURCE_FILE_CONFIG.A_SOURCE_FILE_CONFIG_KEY) into PARQUET file on OCI infrastructure.
|
||||||
* Each YEAR_MONTH pair goes to seperate file (implicit partitioning).
|
* Each YEAR_MONTH pair goes to a separate file (implicit partitioning).
|
||||||
* TRASH policy is controlled by A_SOURCE_FILE_CONFIG.KEEP_IN_TRASH column ('Y'=keep in TRASH, 'N'=delete immediately).
|
* TRASH policy is controlled by A_SOURCE_FILE_CONFIG.IS_KEEP_IN_TRASH column ('Y'=keep in TRASH, 'N'=delete immediately).
|
||||||
**/
|
**/
|
||||||
PROCEDURE ARCHIVE_TABLE_DATA (
|
PROCEDURE ARCHIVE_TABLE_DATA (
|
||||||
pSourceFileConfigKey IN CT_MRDS.A_SOURCE_FILE_CONFIG.A_SOURCE_FILE_CONFIG_KEY%TYPE
|
pSourceFileConfigKey IN CT_MRDS.A_SOURCE_FILE_CONFIG.A_SOURCE_FILE_CONFIG_KEY%TYPE
|
||||||
@@ -62,7 +62,7 @@ AS
|
|||||||
* @desc Function wrapper for ARCHIVE_TABLE_DATA procedure.
|
* @desc Function wrapper for ARCHIVE_TABLE_DATA procedure.
|
||||||
* Returns SQLCODE for Python library integration.
|
* Returns SQLCODE for Python library integration.
|
||||||
* Calls the main ARCHIVE_TABLE_DATA procedure and captures execution result.
|
* Calls the main ARCHIVE_TABLE_DATA procedure and captures execution result.
|
||||||
* TRASH policy is controlled by A_SOURCE_FILE_CONFIG.KEEP_IN_TRASH column ('Y'=keep in TRASH, 'N'=delete immediately).
|
* TRASH policy is controlled by A_SOURCE_FILE_CONFIG.IS_KEEP_IN_TRASH column ('Y'=keep in TRASH, 'N'=delete immediately).
|
||||||
* @example SELECT FILE_ARCHIVER.FN_ARCHIVE_TABLE_DATA(pSourceFileConfigKey => 123) FROM DUAL;
|
* @example SELECT FILE_ARCHIVER.FN_ARCHIVE_TABLE_DATA(pSourceFileConfigKey => 123) FROM DUAL;
|
||||||
* @ex_rslt 0 (success) or error code
|
* @ex_rslt 0 (success) or error code
|
||||||
**/
|
**/
|
||||||
@@ -96,16 +96,16 @@ AS
|
|||||||
/**
|
/**
|
||||||
* @name GATHER_TABLE_STAT_ALL
|
* @name GATHER_TABLE_STAT_ALL
|
||||||
* @desc Multi-level batch statistics gathering procedure with three granularity levels.
|
* @desc Multi-level batch statistics gathering procedure with three granularity levels.
|
||||||
* Processes configurations based on ARCHIVE_ENABLED setting (when pOnlyEnabled=TRUE).
|
* Processes configurations based on IS_ARCHIVE_ENABLED setting (when pOnlyEnabled=TRUE).
|
||||||
* Gathers statistics for external tables and inserts data into A_TABLE_STAT and A_TABLE_STAT_HIST.
|
* Gathers statistics for external tables and inserts data into A_TABLE_STAT and A_TABLE_STAT_HIST.
|
||||||
* @param pSourceFileConfigKey - (LEVEL 1) Gather stats for specific configuration key (highest priority)
|
* @param pSourceFileConfigKey - (LEVEL 1) Gather stats for specific configuration key (highest priority)
|
||||||
* @param pSourceKey - (LEVEL 2) Gather stats for all tables in source system (e.g., 'LM', 'C2D') (medium priority)
|
* @param pSourceKey - (LEVEL 2) Gather stats for all tables in source system (e.g., 'LM', 'C2D') (medium priority)
|
||||||
* @param pGatherAll - (LEVEL 3) When TRUE, gather stats for ALL tables across all sources (lowest priority)
|
* @param pGatherAll - (LEVEL 3) When TRUE, gather stats for ALL tables across all sources (lowest priority)
|
||||||
* @param pOnlyEnabled - When TRUE (default), only process tables with ARCHIVE_ENABLED='Y'
|
* @param pOnlyEnabled - When TRUE (default), only process tables with IS_ARCHIVE_ENABLED='Y'
|
||||||
* @example -- Level 1: CALL FILE_ARCHIVER.GATHER_TABLE_STAT_ALL(pSourceFileConfigKey => 123);
|
* @example -- Level 1: CALL FILE_ARCHIVER.GATHER_TABLE_STAT_ALL(pSourceFileConfigKey => 123);
|
||||||
* @example -- Level 2: CALL FILE_ARCHIVER.GATHER_TABLE_STAT_ALL(pSourceKey => 'LM');
|
* @example -- Level 2: CALL FILE_ARCHIVER.GATHER_TABLE_STAT_ALL(pSourceKey => 'LM');
|
||||||
* @example -- Level 3: CALL FILE_ARCHIVER.GATHER_TABLE_STAT_ALL(pGatherAll => TRUE);
|
* @example -- Level 3: CALL FILE_ARCHIVER.GATHER_TABLE_STAT_ALL(pGatherAll => TRUE);
|
||||||
* @example -- All tables regardless of ARCHIVE_ENABLED: CALL FILE_ARCHIVER.GATHER_TABLE_STAT_ALL(pGatherAll => TRUE, pOnlyEnabled => FALSE);
|
* @example -- All tables regardless of IS_ARCHIVE_ENABLED: CALL FILE_ARCHIVER.GATHER_TABLE_STAT_ALL(pGatherAll => TRUE, pOnlyEnabled => FALSE);
|
||||||
**/
|
**/
|
||||||
PROCEDURE GATHER_TABLE_STAT_ALL (
|
PROCEDURE GATHER_TABLE_STAT_ALL (
|
||||||
pSourceFileConfigKey IN CT_MRDS.A_SOURCE_FILE_CONFIG.A_SOURCE_FILE_CONFIG_KEY%TYPE DEFAULT NULL,
|
pSourceFileConfigKey IN CT_MRDS.A_SOURCE_FILE_CONFIG.A_SOURCE_FILE_CONFIG_KEY%TYPE DEFAULT NULL,
|
||||||
@@ -122,7 +122,7 @@ AS
|
|||||||
* @param pSourceFileConfigKey - (LEVEL 1) Gather stats for specific configuration key (highest priority)
|
* @param pSourceFileConfigKey - (LEVEL 1) Gather stats for specific configuration key (highest priority)
|
||||||
* @param pSourceKey - (LEVEL 2) Gather stats for all tables in source system (medium priority)
|
* @param pSourceKey - (LEVEL 2) Gather stats for all tables in source system (medium priority)
|
||||||
* @param pGatherAll - (LEVEL 3) When TRUE, gather stats for ALL tables across all sources (lowest priority)
|
* @param pGatherAll - (LEVEL 3) When TRUE, gather stats for ALL tables across all sources (lowest priority)
|
||||||
* @param pOnlyEnabled - When TRUE (default), only process tables with ARCHIVE_ENABLED='Y'
|
* @param pOnlyEnabled - When TRUE (default), only process tables with IS_ARCHIVE_ENABLED='Y'
|
||||||
* @example SELECT FILE_ARCHIVER.FN_GATHER_TABLE_STAT_ALL(pSourceKey => 'LM') FROM DUAL;
|
* @example SELECT FILE_ARCHIVER.FN_GATHER_TABLE_STAT_ALL(pSourceKey => 'LM') FROM DUAL;
|
||||||
* @ex_rslt 0 (success) or error code
|
* @ex_rslt 0 (success) or error code
|
||||||
**/
|
**/
|
||||||
@@ -136,8 +136,8 @@ AS
|
|||||||
/**
|
/**
|
||||||
* @name ARCHIVE_ALL
|
* @name ARCHIVE_ALL
|
||||||
* @desc Multi-level batch archival procedure with three granularity levels.
|
* @desc Multi-level batch archival procedure with three granularity levels.
|
||||||
* Only processes configurations where ARCHIVE_ENABLED='Y'.
|
* Only processes configurations where IS_ARCHIVE_ENABLED='Y'.
|
||||||
* TRASH policy for each table is controlled by individual KEEP_IN_TRASH column.
|
* TRASH policy for each table is controlled by individual IS_KEEP_IN_TRASH column.
|
||||||
* @param pSourceFileConfigKey - (LEVEL 1) Archive specific configuration key (highest priority)
|
* @param pSourceFileConfigKey - (LEVEL 1) Archive specific configuration key (highest priority)
|
||||||
* @param pSourceKey - (LEVEL 2) Archive all enabled tables for source system (e.g., 'LM', 'C2D') (medium priority)
|
* @param pSourceKey - (LEVEL 2) Archive all enabled tables for source system (e.g., 'LM', 'C2D') (medium priority)
|
||||||
* @param pArchiveAll - (LEVEL 3) When TRUE, archive ALL enabled tables across all sources (lowest priority)
|
* @param pArchiveAll - (LEVEL 3) When TRUE, archive ALL enabled tables across all sources (lowest priority)
|
||||||
|
|||||||
2009
MARS_Packages/REL01_ADDITIONS/MARS-828/new_version/FILE_MANAGER.pkb
Normal file
2009
MARS_Packages/REL01_ADDITIONS/MARS-828/new_version/FILE_MANAGER.pkb
Normal file
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,639 @@
|
|||||||
|
create or replace PACKAGE CT_MRDS.FILE_MANAGER
|
||||||
|
AUTHID CURRENT_USER
|
||||||
|
AS
|
||||||
|
/**
|
||||||
|
* General comment for package: Please put comments for functions and procedures as shown in below example.
|
||||||
|
* It is a standard.
|
||||||
|
* The structure of comment is used by GET_PACKAGE_DOCUMENTATION function
|
||||||
|
* which returns documentation text for confluence page (to Copy-Paste it).
|
||||||
|
**/
|
||||||
|
|
||||||
|
-- Example comment:
|
||||||
|
/**
|
||||||
|
* @name EX_PROCEDURE_NAME
|
||||||
|
* @desc Procedure description
|
||||||
|
* @example select FILE_MANAGER.EX_PROCEDURE_NAME(pParameter => 129) from dual;
|
||||||
|
* @ex_rslt Example Result
|
||||||
|
**/
|
||||||
|
|
||||||
|
-- Package Version Information (Semantic Versioning: MAJOR.MINOR.PATCH)
|
||||||
|
PACKAGE_VERSION CONSTANT VARCHAR2(10) := '3.5.1';
|
||||||
|
PACKAGE_BUILD_DATE CONSTANT VARCHAR2(20) := '2026-02-24 13:35:00';
|
||||||
|
PACKAGE_AUTHOR CONSTANT VARCHAR2(100) := 'Grzegorz Michalski';
|
||||||
|
|
||||||
|
-- Version History (Latest changes first)
|
||||||
|
VERSION_HISTORY CONSTANT VARCHAR2(4000) :=
|
||||||
|
'3.5.1 (2026-02-24): Fixed TIMESTAMP field syntax in GENERATE_EXTERNAL_TABLE_PARAMS for SQL*Loader compatibility (CHAR(35) DATE_FORMAT TIMESTAMP MASK format)' || CHR(13)||CHR(10) ||
|
||||||
|
'3.3.2 (2026-02-20): MARS-828 - Fixed threshold column names in GET_DET_SOURCE_FILE_CONFIG_INFO for MARS-828 compatibility' || CHR(13)||CHR(10) ||
|
||||||
|
'3.3.1 (2025-11-27): MARS-1046 - Fixed ISO 8601 datetime format parsing with milliseconds and timezone (e.g., 2012-03-02T14:16:23.798+01:00)' || CHR(13)||CHR(10) ||
|
||||||
|
'3.3.0 (2025-11-26): MARS-1056 - Fixed VARCHAR2 definitions in GENERATE_EXTERNAL_TABLE_PARAMS to preserve CHAR/BYTE semantics from template tables' || CHR(13)||CHR(10) ||
|
||||||
|
'3.2.1 (2025-11-24): MARS-1049 - Added pEncoding parameter support for CSV character set specification' || CHR(13)||CHR(10) ||
|
||||||
|
'3.2.0 (2025-10-22): Added package versioning system using centralized ENV_MANAGER functions' || CHR(13)||CHR(10) ||
|
||||||
|
'3.1.0 (2025-10-20): Enhanced PROCESS_SOURCE_FILE with 6-step validation workflow' || CHR(13)||CHR(10) ||
|
||||||
|
'3.0.0 (2025-10-15): Separated export procedures into dedicated DATA_EXPORTER package' || CHR(13)||CHR(10) ||
|
||||||
|
'2.5.0 (2025-10-10): Added DELETE_SOURCE_CASCADE for safe configuration removal' || CHR(13)||CHR(10) ||
|
||||||
|
'2.0.0 (2025-09-25): Added official path patterns support (INBOX 3-level, ODS 2-level, ARCHIVE 2-level)' || CHR(13)||CHR(10) ||
|
||||||
|
'1.0.0 (2025-09-01): Initial release with file processing and validation capabilities';
|
||||||
|
|
||||||
|
TYPE tSourceFileReceived IS RECORD
|
||||||
|
(
|
||||||
|
A_SOURCE_FILE_RECEIVED_KEY CT_MRDS.A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY%TYPE,
|
||||||
|
A_SOURCE_FILE_CONFIG_KEY CT_MRDS.A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_CONFIG_KEY%TYPE,
|
||||||
|
SOURCE_FILE_PREFIX_INBOX VARCHAR2(430),
|
||||||
|
SOURCE_FILE_PREFIX_ODS VARCHAR2(430),
|
||||||
|
SOURCE_FILE_PREFIX_QUARANTINE VARCHAR2(430),
|
||||||
|
SOURCE_FILE_PREFIX_ARCHIVE VARCHAR2(430),
|
||||||
|
SOURCE_FILE_NAME CT_MRDS.A_SOURCE_FILE_RECEIVED.SOURCE_FILE_NAME%TYPE,
|
||||||
|
RECEPTION_DATE CT_MRDS.A_SOURCE_FILE_RECEIVED.RECEPTION_DATE%TYPE,
|
||||||
|
PROCESSING_STATUS CT_MRDS.A_SOURCE_FILE_RECEIVED.PROCESSING_STATUS%TYPE,
|
||||||
|
EXTERNAL_TABLE_NAME CT_MRDS.A_SOURCE_FILE_RECEIVED.EXTERNAL_TABLE_NAME%TYPE
|
||||||
|
);
|
||||||
|
|
||||||
|
cgBL CONSTANT VARCHAR2(2) := CHR(13)||CHR(10);
|
||||||
|
vgSourceFileConfigKey PLS_INTEGER;
|
||||||
|
vgMsgTmp VARCHAR2(32000);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_SOURCE_FILE_CONFIG
|
||||||
|
* @desc Get source file type by matching the source file name against source file type naming patterns
|
||||||
|
* or by specifying the id of a received source file.
|
||||||
|
* @example ...
|
||||||
|
* @ex_rslt "CT_MRDS.A_SOURCE_FILE_CONFIG%ROWTYPE"
|
||||||
|
**/
|
||||||
|
FUNCTION GET_SOURCE_FILE_CONFIG(pFileUri IN VARCHAR2 DEFAULT NULL
|
||||||
|
, pSourceFileReceivedKey IN NUMBER DEFAULT NULL
|
||||||
|
, pSourceFileConfigKey IN NUMBER DEFAULT NULL)
|
||||||
|
RETURN CT_MRDS.A_SOURCE_FILE_CONFIG%ROWTYPE;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name REGISTER_SOURCE_FILE_RECEIVED
|
||||||
|
* @desc Register a newly received source file in A_SOURCE_FILE_RECEIVED table.
|
||||||
|
* This overload automatically determines source file type from the file name.
|
||||||
|
* It returns the value of A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY column for newly added record.
|
||||||
|
* @example vSourceFileReceivedKey := FILE_MANAGER.REGISTER_SOURCE_FILE_RECEIVED(pSourceFileReceivedName => 'INBOX/C2D/UC_DISSEM/UC_NMA_DISSEM/UC_NMA_DISSEM-277740.csv');
|
||||||
|
* @ex_rslt 3245
|
||||||
|
**/
|
||||||
|
FUNCTION REGISTER_SOURCE_FILE_RECEIVED (
|
||||||
|
pSourceFileReceivedName IN VARCHAR2
|
||||||
|
)
|
||||||
|
RETURN PLS_INTEGER;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name REGISTER_SOURCE_FILE_RECEIVED
|
||||||
|
* @desc Register a new source file in A_SOURCE_FILE_RECEIVED table based on pSourceFileReceivedName and pSourceFileConfig.
|
||||||
|
* Then it returns the value of A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY column for newly added record.
|
||||||
|
* @example vSourceFileReceivedKey := FILE_MANAGER.REGISTER_SOURCE_FILE_RECEIVED(
|
||||||
|
* pSourceFileReceivedName => 'INBOX/C2D/UC_DISSEM/UC_NMA_DISSEM/UC_NMA_DISSEM-277740.csv'
|
||||||
|
* ,pSourceFileConfig => ...A_SOURCE_FILE_CONFIG%ROWTYPE... );
|
||||||
|
* @ex_rslt 3245
|
||||||
|
**/
|
||||||
|
FUNCTION REGISTER_SOURCE_FILE_RECEIVED (
|
||||||
|
pSourceFileReceivedName IN VARCHAR2,
|
||||||
|
pSourceFileConfig IN CT_MRDS.A_SOURCE_FILE_CONFIG%ROWTYPE
|
||||||
|
)
|
||||||
|
RETURN PLS_INTEGER;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name SET_SOURCE_FILE_RECEIVED_STATUS
|
||||||
|
* @desc Set status of file in A_SOURCE_FILE_RECEIVED table - PROCESSING_STATUS column
|
||||||
|
* based on A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY
|
||||||
|
* and provided value of pStatus parameter
|
||||||
|
* @example exec FILE_MANAGER.SET_SOURCE_FILE_RECEIVED_STATUS(pSourceFileReceivedKey => 377, pStatus => 'READY_FOR_INGESTION');
|
||||||
|
**/
|
||||||
|
PROCEDURE SET_SOURCE_FILE_RECEIVED_STATUS(
|
||||||
|
pSourceFileReceivedKey IN PLS_INTEGER,
|
||||||
|
pStatus IN VARCHAR2
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_EXTERNAL_TABLE_COLUMNS
|
||||||
|
* @desc Function used to get string with all table columns definitions based on pTargetTableTemplate "TEMPLATE TABLE" name.
|
||||||
|
* It used for creating "EXTERNAL TABLE" using CREATE_EXTERNAL_TABLE procedure.
|
||||||
|
* @example select FILE_MANAGER.GET_EXTERNAL_TABLE_COLUMNS(pTargetTableTemplate => 'CT_ET_TEMPLATES.LM_STANDING_FACILITIES_HEADER') from dual;
|
||||||
|
* @ex_rslt "A_KEY" NUMBER(38,0) NOT NULL ENABLE,
|
||||||
|
* "A_WORKFLOW_HISTORY_KEY" NUMBER(38,0) NOT NULL ENABLE,
|
||||||
|
* "REV_NUMBER" NUMBER(28,0),
|
||||||
|
* "REF_DATE" DATE,
|
||||||
|
* "FREE_TEXT" VARCHAR2(1000 CHAR),
|
||||||
|
* "MLF_BS_TOTAL" NUMBER(28,10),
|
||||||
|
* "DF_BS_TOTAL" NUMBER(28,10),
|
||||||
|
* "MLF_SF_TOTAL" NUMBER(28,10),
|
||||||
|
* "DF_SF_TOTAL" NUMBER(28,10)
|
||||||
|
**/
|
||||||
|
FUNCTION GET_EXTERNAL_TABLE_COLUMNS (
|
||||||
|
pTargetTableTemplate IN VARCHAR2
|
||||||
|
)
|
||||||
|
RETURN CLOB;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name CREATE_EXTERNAL_TABLE
|
||||||
|
* @desc A wrapper procedure for DBMS_CLOUD.CREATE_EXTERNAL_TABLE which creates External Table
|
||||||
|
* MARS-1049: Added pEncoding parameter for CSV character set specification
|
||||||
|
* @param pEncoding - Character set encoding for CSV files (e.g., 'UTF8', 'WE8MSWIN1252')
|
||||||
|
* If provided, adds CHARACTERSET clause to external table definition
|
||||||
|
* @example
|
||||||
|
* begin
|
||||||
|
* FILE_MANAGER.CREATE_EXTERNAL_TABLE(
|
||||||
|
* pTableName => 'STANDING_FACILITIES_HEADER',
|
||||||
|
* pTemplateTableName => 'CT_ET_TEMPLATES.LM_STANDING_FACILITIES_HEADER',
|
||||||
|
* pPrefix => 'ODS/LM/STANDING_FACILITIES_HEADER/',
|
||||||
|
* pBucketUri => 'https://objectstorage.eu-frankfurt-1.oraclecloud.com/n/frcnomajoc7v/b/mrds_data_tst/o/',
|
||||||
|
* pFileName => NULL,
|
||||||
|
* pDelimiter => ',',
|
||||||
|
* pEncoding => 'UTF8'
|
||||||
|
* );
|
||||||
|
* end;
|
||||||
|
**/
|
||||||
|
PROCEDURE CREATE_EXTERNAL_TABLE (
|
||||||
|
pTableName IN VARCHAR2,
|
||||||
|
pTemplateTableName IN VARCHAR2,
|
||||||
|
pPrefix IN VARCHAR2,
|
||||||
|
pBucketUri IN VARCHAR2 DEFAULT ENV_MANAGER.gvInboxBucketUri,
|
||||||
|
pFileName IN VARCHAR2 DEFAULT NULL,
|
||||||
|
pDelimiter IN VARCHAR2 DEFAULT ',',
|
||||||
|
pEncoding IN VARCHAR2 DEFAULT NULL -- MARS-1049: new parameter
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name CREATE_EXTERNAL_TABLE
|
||||||
|
* @desc Creates External Table for single file provided by
|
||||||
|
* pSourceFileReceivedKey parameter (A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY)
|
||||||
|
* @example exec FILE_MANAGER.CREATE_EXTERNAL_TABLE(pSourceFileReceivedKey => 377);
|
||||||
|
**/
|
||||||
|
PROCEDURE CREATE_EXTERNAL_TABLE (
|
||||||
|
pSourceFileReceivedKey IN NUMBER
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name VALIDATE_SOURCE_FILE_RECEIVED
|
||||||
|
* @desc A wrapper procedure for DBMS_CLOUD.VALIDATE_EXTERNAL_TABLE
|
||||||
|
* It validates an External Table built upon a single file
|
||||||
|
* provided by pSourceFileReceivedKey parameter (A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY)
|
||||||
|
* @example exec FILE_MANAGER.VALIDATE_SOURCE_FILE_RECEIVED(pSourceFileReceivedKey => 377);
|
||||||
|
**/
|
||||||
|
PROCEDURE VALIDATE_SOURCE_FILE_RECEIVED
|
||||||
|
(
|
||||||
|
pSourceFileReceivedKey IN NUMBER
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name VALIDATE_EXTERNAL_TABLE
|
||||||
|
* @desc A wrapper function for DBMS_CLOUD.VALIDATE_EXTERNAL_TABLE.
|
||||||
|
* It validates External Table provided by parameter pTableName.
|
||||||
|
* It returns: PASSED or FAILED.
|
||||||
|
* @example
|
||||||
|
* declare
|
||||||
|
* vStatus VARCHAR2(100);
|
||||||
|
* begin
|
||||||
|
* vStatus := FILE_MANAGER.VALIDATE_EXTERNAL_TABLE(pTableName => 'STANDING_FACILITIES_HEADER');
|
||||||
|
* DBMS_OUTPUT.PUT_LINE('vStatus = '||vStatus);
|
||||||
|
* end;
|
||||||
|
*
|
||||||
|
* @ex_rslt FAILED
|
||||||
|
**/
|
||||||
|
FUNCTION VALIDATE_EXTERNAL_TABLE(pTableName IN VARCHAR2)
|
||||||
|
RETURN VARCHAR2;
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name S_VALIDATE_EXTERNAL_TABLE
|
||||||
|
* @desc A function which checks if a SELECT query returns any rows.
|
||||||
|
* It tries to select the External Table provided by parameter pTableName.
|
||||||
|
* It returns: PASSED or FAILED.
|
||||||
|
* @example
|
||||||
|
* declare
|
||||||
|
* vStatus VARCHAR2(100);
|
||||||
|
* begin
|
||||||
|
* vStatus := FILE_MANAGER.S_VALIDATE_EXTERNAL_TABLE(pTableName => 'STANDING_FACILITIES_HEADER');
|
||||||
|
* DBMS_OUTPUT.PUT_LINE('vStatus = '||vStatus);
|
||||||
|
* end;
|
||||||
|
*
|
||||||
|
* @ex_rslt PASSED
|
||||||
|
**/
|
||||||
|
FUNCTION S_VALIDATE_EXTERNAL_TABLE(pTableName IN VARCHAR2)
|
||||||
|
RETURN VARCHAR2;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name DROP_EXTERNAL_TABLE
|
||||||
|
* @desc It drops External Table for single file provided by
|
||||||
|
* pSourceFileReceivedKey parameter (A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY)
|
||||||
|
* @example exec FILE_MANAGER.DROP_EXTERNAL_TABLE(pSourceFileReceivedKey => 377);
|
||||||
|
**/
|
||||||
|
PROCEDURE DROP_EXTERNAL_TABLE (
|
||||||
|
pSourceFileReceivedKey IN NUMBER
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name COPY_FILE
|
||||||
|
* @desc It copies file provided by
|
||||||
|
* pSourceFileReceivedKey parameter (A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY)
|
||||||
|
* into destination provided by pDestination parameter.
|
||||||
|
* pDestination parameter allowed values are: 'ODS'
|
||||||
|
* @example exec FILE_MANAGER.COPY_FILE(pSourceFileReceivedKey => 377, pDestination => 'ODS');
|
||||||
|
**/
|
||||||
|
PROCEDURE COPY_FILE(
|
||||||
|
pSourceFileReceivedKey IN NUMBER,
|
||||||
|
pDestination IN VARCHAR2
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name MOVE_FILE
|
||||||
|
* @desc It moves file provided by
|
||||||
|
* pSourceFileReceivedKey parameter (A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY)
|
||||||
|
* into destination provided by pDestination parameter.
|
||||||
|
* pDestination parameter allowed values are: 'ODS', 'QUARANTINE'
|
||||||
|
* @example exec FILE_MANAGER.MOVE_FILE(pSourceFileReceivedKey => 377, pDestination => 'ODS');
|
||||||
|
**/
|
||||||
|
PROCEDURE MOVE_FILE(
|
||||||
|
pSourceFileReceivedKey IN NUMBER,
|
||||||
|
pDestination IN VARCHAR2
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name DELETE_FOLDER_CONTENTS
|
||||||
|
* @desc It deletes all files from specified folder in the cloud storage.
|
||||||
|
* The procedure lists all objects in the specified folder prefix and deletes them one by one.
|
||||||
|
* pBucketArea parameter specifies which bucket to use: 'INBOX', 'DATA', 'ARCHIVE'
|
||||||
|
* pFolderPrefix parameter specifies the folder path within the bucket (e.g., 'C2D/UC_DISSEM/UC_NMA_DISSEM/')
|
||||||
|
* @example exec FILE_MANAGER.DELETE_FOLDER_CONTENTS(pBucketArea => 'INBOX', pFolderPrefix => 'C2D/UC_DISSEM/UC_NMA_DISSEM/');
|
||||||
|
**/
|
||||||
|
PROCEDURE DELETE_FOLDER_CONTENTS(
|
||||||
|
pBucketArea IN VARCHAR2,
|
||||||
|
pFolderPrefix IN VARCHAR2
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name PROCESS_SOURCE_FILE
|
||||||
|
* @desc It processes the file provided by the pSourceFileReceivedName parameter.
|
||||||
|
* Umbrella procedure that calls:
|
||||||
|
* - REGISTER_SOURCE_FILE_RECEIVED;
|
||||||
|
* - CREATE_EXTERNAL_TABLE;
|
||||||
|
* - VALIDATE_SOURCE_FILE_RECEIVED;
|
||||||
|
* - DROP_EXTERNAL_TABLE;
|
||||||
|
* - MOVE_FILE;
|
||||||
|
* @example exec FILE_MANAGER.PROCESS_SOURCE_FILE(pSourceFileReceivedName => 'INBOX/C2D/UC_DISSEM/UC_NMA_DISSEM/UC_NMA_DISSEM-277740.csv');
|
||||||
|
**/
|
||||||
|
PROCEDURE PROCESS_SOURCE_FILE(pSourceFileReceivedName IN VARCHAR2)
|
||||||
|
;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name PROCESS_SOURCE_FILE
|
||||||
|
* @desc It processes the file provided by the pSourceFileReceivedName parameter and returns the processing result value.
|
||||||
|
* It returns (success/failure) => 0 / -(value).
|
||||||
|
* Umbrella function that calls the PROCESS_SOURCE_FILE procedure.
|
||||||
|
* @example
|
||||||
|
* declare
|
||||||
|
* vResult PLS_INTEGER;
|
||||||
|
* begin
|
||||||
|
* vResult := CT_MRDS.FILE_MANAGER.PROCESS_SOURCE_FILE(PSOURCEFILERECEIVEDNAME => 'INBOX/C2D/UC_DISSEM/UC_NMA_DISSEM/UC_NMA_DISSEM-277740.csv');
|
||||||
|
* DBMS_OUTPUT.PUT_LINE('vResult = ' || vResult);
|
||||||
|
* end;
|
||||||
|
* @ex_rslt 0
|
||||||
|
* -20021
|
||||||
|
**/
|
||||||
|
FUNCTION PROCESS_SOURCE_FILE(pSourceFileReceivedName IN VARCHAR2)
|
||||||
|
RETURN PLS_INTEGER;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_DATE_FORMAT
|
||||||
|
* @desc Returns date format for specified template table name and column name.
|
||||||
|
* Date is taken from configuration A_COLUMN_DATE_FORMAT table.
|
||||||
|
* @example select FILE_MANAGER.GET_DATE_FORMAT(
|
||||||
|
* pTemplateTableName => 'STANDING_FACILITIES_HEADER',
|
||||||
|
* pColumnName => 'SNAPSHOT_DATE')
|
||||||
|
* from dual;
|
||||||
|
* @ex_rslt DD/MM/YYYY HH24:MI:SS
|
||||||
|
**/
|
||||||
|
FUNCTION GET_DATE_FORMAT(
|
||||||
|
pTemplateTableName IN VARCHAR2,
|
||||||
|
pColumnName IN VARCHAR2
|
||||||
|
) RETURN VARCHAR2;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GENERATE_EXTERNAL_TABLE_PARAMS
|
||||||
|
* @desc It builds two strings: pColumnList and pFieldList for specified Template Table name, by parameter: pTemplateTableName.
|
||||||
|
* @example
|
||||||
|
* declare
|
||||||
|
* vColumnList CLOB;
|
||||||
|
* vFieldList CLOB;
|
||||||
|
* begin
|
||||||
|
* FILE_MANAGER.GENERATE_EXTERNAL_TABLE_PARAMS (
|
||||||
|
* pTemplateTableName => 'CT_ET_TEMPLATES.LM_STANDING_FACILITIES_HEADER'
|
||||||
|
* ,pColumnList => vColumnList
|
||||||
|
* ,pFieldList => vFieldList
|
||||||
|
* );
|
||||||
|
* DBMS_OUTPUT.PUT_LINE('vColumnList = '||vColumnList);
|
||||||
|
* DBMS_OUTPUT.PUT_LINE('vFieldList = '||vFieldList);
|
||||||
|
* end;
|
||||||
|
* /
|
||||||
|
**/
|
||||||
|
PROCEDURE GENERATE_EXTERNAL_TABLE_PARAMS (
|
||||||
|
pTemplateTableName IN VARCHAR2,
|
||||||
|
pColumnList OUT CLOB,
|
||||||
|
pFieldList OUT CLOB
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name ADD_SOURCE
|
||||||
|
* @desc Insert a new record to A_SOURCE table.
|
||||||
|
* pSourceKey is a PRIMARY KEY value.
|
||||||
|
**/
|
||||||
|
PROCEDURE ADD_SOURCE (
|
||||||
|
pSourceKey IN CT_MRDS.A_SOURCE.A_SOURCE_KEY%TYPE,
|
||||||
|
pSourceName IN CT_MRDS.A_SOURCE.SOURCE_NAME%TYPE
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name DELETE_SOURCE_CASCADE
|
||||||
|
* @desc Safely deletes a SOURCE specified by pSourceKey parameter from A_SOURCE table and all dependent tables:
|
||||||
|
* - A_SOURCE_FILE_CONFIG
|
||||||
|
* - A_SOURCE_FILE_RECEIVED
|
||||||
|
* - A_COLUMN_DATE_FORMAT (only if template table is not shared with other source systems)
|
||||||
|
* The procedure checks if template tables are shared before deleting date format configurations.
|
||||||
|
* If a template table is used by multiple source systems, date formats are preserved.
|
||||||
|
* @example CALL CT_MRDS.FILE_MANAGER.DELETE_SOURCE_CASCADE(pSourceKey => 'TEST_SYS');
|
||||||
|
**/
|
||||||
|
PROCEDURE DELETE_SOURCE_CASCADE (
|
||||||
|
pSourceKey IN CT_MRDS.A_SOURCE.A_SOURCE_KEY%TYPE
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_CONTAINER_SOURCE_FILE_CONFIG_KEY
|
||||||
|
* @desc For specified parameter pSourceFileId (A_SOURCE_FILE_CONFIG.SOURCE_FILE_ID)
|
||||||
|
* it returns A_SOURCE_FILE_CONFIG.A_SOURCE_FILE_CONFIG_KEY for related CONTAINER record.
|
||||||
|
* @example select FILE_MANAGER.GET_CONTAINER_SOURCE_FILE_CONFIG_KEY(
|
||||||
|
* pSourceFileId => 'UC_DISSEM')
|
||||||
|
* from dual;
|
||||||
|
* @ex_rslt 126
|
||||||
|
**/
|
||||||
|
FUNCTION GET_CONTAINER_SOURCE_FILE_CONFIG_KEY (
|
||||||
|
pSourceFileId IN CT_MRDS.A_SOURCE_FILE_CONFIG.SOURCE_FILE_ID%TYPE
|
||||||
|
) RETURN PLS_INTEGER;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_SOURCE_FILE_CONFIG_KEY
|
||||||
|
* @desc For specified input parameters,
|
||||||
|
* it returns A_SOURCE_FILE_CONFIG.A_SOURCE_FILE_CONFIG_KEY.
|
||||||
|
* @example select FILE_MANAGER.GET_SOURCE_FILE_CONFIG_KEY (
|
||||||
|
* pSourceFileType => 'INPUT'
|
||||||
|
* ,pSourceFileId => 'UC_DISSEM'
|
||||||
|
* ,pTableId => 'UC_NMA_DISSEM')
|
||||||
|
* from dual;
|
||||||
|
* @ex_rslt 126
|
||||||
|
**/
|
||||||
|
FUNCTION GET_SOURCE_FILE_CONFIG_KEY (
|
||||||
|
pSourceFileType IN CT_MRDS.A_SOURCE_FILE_CONFIG.SOURCE_FILE_TYPE%TYPE DEFAULT 'INPUT'
|
||||||
|
,pSourceFileId IN CT_MRDS.A_SOURCE_FILE_CONFIG.SOURCE_FILE_ID%TYPE
|
||||||
|
,pTableId IN CT_MRDS.A_SOURCE_FILE_CONFIG.TABLE_ID%TYPE
|
||||||
|
) RETURN PLS_INTEGER;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name ADD_SOURCE_FILE_CONFIG
|
||||||
|
* @desc Insert a new record to A_SOURCE_FILE_CONFIG table.
|
||||||
|
* MARS-1049: Added pEncoding parameter for CSV character set specification.
|
||||||
|
* @param pEncoding - Character set encoding for CSV files (e.g., 'UTF8', 'WE8MSWIN1252', 'EE8ISO8859P2')
|
||||||
|
* If NULL, no CHARACTERSET clause is added to external table definitions
|
||||||
|
* @example CALL CT_MRDS.FILE_MANAGER.ADD_SOURCE_FILE_CONFIG(
|
||||||
|
* pSourceKey => 'C2D', pSourceFileType => 'INPUT',
|
||||||
|
* pSourceFileId => 'UC_DISSEM', pTableId => 'METADATA_LOADS',
|
||||||
|
* pTemplateTableName => 'CT_ET_TEMPLATES.C2D_A_UC_DISSEM_METADATA_LOADS',
|
||||||
|
* pEncoding => 'UTF8'
|
||||||
|
* );
|
||||||
|
**/
|
||||||
|
PROCEDURE ADD_SOURCE_FILE_CONFIG (
|
||||||
|
pSourceKey IN CT_MRDS.A_SOURCE_FILE_CONFIG.A_SOURCE_KEY%TYPE
|
||||||
|
,pSourceFileType IN CT_MRDS.A_SOURCE_FILE_CONFIG.SOURCE_FILE_TYPE%TYPE
|
||||||
|
,pSourceFileId IN CT_MRDS.A_SOURCE_FILE_CONFIG.SOURCE_FILE_ID%TYPE
|
||||||
|
,pSourceFileDesc IN CT_MRDS.A_SOURCE_FILE_CONFIG.SOURCE_FILE_DESC%TYPE
|
||||||
|
,pSourceFileNamePattern IN CT_MRDS.A_SOURCE_FILE_CONFIG.SOURCE_FILE_NAME_PATTERN%TYPE
|
||||||
|
,pTableId IN CT_MRDS.A_SOURCE_FILE_CONFIG.TABLE_ID%TYPE DEFAULT NULL
|
||||||
|
,pTemplateTableName IN CT_MRDS.A_SOURCE_FILE_CONFIG.TEMPLATE_TABLE_NAME%TYPE DEFAULT NULL
|
||||||
|
,pContainerFileKey IN CT_MRDS.A_SOURCE_FILE_CONFIG.CONTAINER_FILE_KEY%TYPE DEFAULT NULL
|
||||||
|
,pEncoding IN CT_MRDS.A_SOURCE_FILE_CONFIG.ENCODING%TYPE DEFAULT NULL -- MARS-1049: NEW PARAMETER
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name ADD_COLUMN_DATE_FORMAT
|
||||||
|
* @desc Insert a new record to A_COLUMN_DATE_FORMAT table.
|
||||||
|
**/
|
||||||
|
PROCEDURE ADD_COLUMN_DATE_FORMAT (
|
||||||
|
pTemplateTableName IN CT_MRDS.A_COLUMN_DATE_FORMAT.TEMPLATE_TABLE_NAME%TYPE
|
||||||
|
,pColumnName IN CT_MRDS.A_COLUMN_DATE_FORMAT.COLUMN_NAME%TYPE
|
||||||
|
,pDateFormat IN CT_MRDS.A_COLUMN_DATE_FORMAT.DATE_FORMAT%TYPE
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_BUCKET_URI
|
||||||
|
* @desc Function used to get string with bucket http url.
|
||||||
|
* Possible input values for pBucketArea are: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
|
||||||
|
* @example select FILE_MANAGER.GET_BUCKET_URI(pBucketArea => 'ODS') from dual;
|
||||||
|
* @ex_rslt https://objectstorage.eu-frankfurt-1.oraclecloud.com/n/frcnomajoc7v/b/mrds_data_tst/o/
|
||||||
|
**/
|
||||||
|
FUNCTION GET_BUCKET_URI(pBucketArea VARCHAR2)
|
||||||
|
RETURN VARCHAR2;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_DET_SOURCE_FILE_CONFIG_INFO
|
||||||
|
* @desc Function returns details about A_SOURCE_FILE_CONFIG record
|
||||||
|
* for specified pSourceFileConfigKey (A_SOURCE_FILE_CONFIG.A_SOURCE_FILE_CONFIG_KEY).
|
||||||
|
* If pIncludeContainerInfo is <> 0 it returns additional info about related Container config record (A_SOURCE_FILE_CONFIG)
|
||||||
|
* If pIncludeColumnFormatInfo is <> 0 it returns additional info about related ColumnFormat config record (A_COLUMN_DATE_FORMAT)
|
||||||
|
* @example select FILE_MANAGER.GET_DET_SOURCE_FILE_CONFIG_INFO (
|
||||||
|
* pSourceFileConfigKey => 128
|
||||||
|
* ,pIncludeContainerInfo => 1
|
||||||
|
* ,pIncludeColumnFormatInfo => 1
|
||||||
|
* ) from dual;
|
||||||
|
* @ex_rslt
|
||||||
|
* Details about File Configuration:
|
||||||
|
* --------------------------------
|
||||||
|
* A_SOURCE_FILE_CONFIG_KEY = 128
|
||||||
|
* A_SOURCE_KEY = C2D
|
||||||
|
* ...
|
||||||
|
* --------------------------------
|
||||||
|
*
|
||||||
|
* Details about related Container Config:
|
||||||
|
* --------------------------------
|
||||||
|
* A_SOURCE_FILE_CONFIG_KEY = 126
|
||||||
|
* A_SOURCE_KEY = C2D
|
||||||
|
* ...
|
||||||
|
* --------------------------------
|
||||||
|
*
|
||||||
|
* Column Date Format config entries:
|
||||||
|
* --------------------------------
|
||||||
|
* TEMPLATE_TABLE_NAME = CT_ET_TEMPLATES.C2D_UC_MA_DISSEM
|
||||||
|
* ...
|
||||||
|
* --------------------------------
|
||||||
|
**/
|
||||||
|
FUNCTION GET_DET_SOURCE_FILE_CONFIG_INFO (
|
||||||
|
pSourceFileConfigKey IN CT_MRDS.A_SOURCE_FILE_CONFIG.A_SOURCE_FILE_CONFIG_KEY%TYPE
|
||||||
|
,pIncludeContainerInfo IN PLS_INTEGER DEFAULT 1
|
||||||
|
,pIncludeColumnFormatInfo IN PLS_INTEGER DEFAULT 1
|
||||||
|
) RETURN VARCHAR2;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_DET_SOURCE_FILE_RECEIVED_INFO
|
||||||
|
* @desc Function returns details about A_SOURCE_FILE_RECEIVED record
|
||||||
|
* for specified pSourceFileReceivedKey (A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY).
|
||||||
|
* If pIncludeConfigInfo is <> 0 it returns additional info about related Container config record (A_SOURCE_FILE_CONFIG)
|
||||||
|
* If pIncludeContainerInfo is <> 0 it returns additional info about related Container config record (A_SOURCE_FILE_CONFIG)
|
||||||
|
* If pIncludeColumnFormatInfo is <> 0 it returns additional info about related ColumnFormat config record (A_COLUMN_DATE_FORMAT)
|
||||||
|
* @example select FILE_MANAGER.GET_DET_SOURCE_FILE_RECEIVED_INFO (
|
||||||
|
* pSourceFileReceivedKey => 377
|
||||||
|
* ,pIncludeConfigInfo => 1
|
||||||
|
* ,pIncludeContainerInfo => 1
|
||||||
|
* ,pIncludeColumnFormatInfo => 1
|
||||||
|
* ) from dual;
|
||||||
|
*
|
||||||
|
**/
|
||||||
|
FUNCTION GET_DET_SOURCE_FILE_RECEIVED_INFO (
|
||||||
|
pSourceFileReceivedKey IN CT_MRDS.A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY%TYPE
|
||||||
|
,pIncludeConfigInfo IN PLS_INTEGER DEFAULT 1
|
||||||
|
,pIncludeContainerInfo IN PLS_INTEGER DEFAULT 1
|
||||||
|
,pIncludeColumnFormatInfo IN PLS_INTEGER DEFAULT 1
|
||||||
|
) RETURN VARCHAR2;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_DET_USER_LOAD_OPERATIONS
|
||||||
|
* @desc Function returns details from USER_LOAD_OPERATIONS table
|
||||||
|
* for specified pOperationId.
|
||||||
|
* @example select FILE_MANAGER.GET_DET_USER_LOAD_OPERATIONS (pOperationId => 3608) from dual;
|
||||||
|
* @ex_rslt
|
||||||
|
* Details about USER_LOAD_OPERATIONS where ID = 3608
|
||||||
|
* --------------------------------
|
||||||
|
* ID = 3608
|
||||||
|
* TYPE = VALIDATE
|
||||||
|
* SID = 31260
|
||||||
|
* SERIAL# = 52915
|
||||||
|
* START_TIME = 2025-05-20 10.08.24.436983 EUROPE/BELGRADE
|
||||||
|
* UPDATE_TIME = 2025-05-20 10.08.24.458643 EUROPE/BELGRADE
|
||||||
|
* STATUS = FAILED
|
||||||
|
* OWNER_NAME = CT_MRDS
|
||||||
|
* TABLE_NAME = STANDING_FACILITIES_HEADER
|
||||||
|
* PARTITION_NAME =
|
||||||
|
* SUBPARTITION_NAME =
|
||||||
|
* FILE_URI_LIST =
|
||||||
|
* ROWS_LOADED =
|
||||||
|
* LOGFILE_TABLE = VALIDATE$3608_LOG
|
||||||
|
* BADFILE_TABLE = VALIDATE$3608_BAD
|
||||||
|
* STATUS_TABLE =
|
||||||
|
* TEMPEXT_TABLE =
|
||||||
|
* CREDENTIAL_NAME =
|
||||||
|
* EXPIRATION_TIME = 2025-05-22 10.08.24.436983000 EUROPE/BELGRADE
|
||||||
|
* --------------------------------
|
||||||
|
**/
|
||||||
|
FUNCTION GET_DET_USER_LOAD_OPERATIONS (
|
||||||
|
pOperationId PLS_INTEGER
|
||||||
|
) RETURN VARCHAR2;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name ANALYZE_VALIDATION_ERRORS
|
||||||
|
* @desc Wrapper function that analyzes validation errors for a source file using its received key.
|
||||||
|
* Automatically derives template schema, table name, CSV URI and validation log table
|
||||||
|
* from file metadata and calls ENV_MANAGER.ANALYZE_VALIDATION_ERRORS.
|
||||||
|
* @example SELECT FILE_MANAGER.ANALYZE_VALIDATION_ERRORS(63) FROM DUAL;
|
||||||
|
* @ex_rslt Detailed validation analysis report with column mismatches and solutions
|
||||||
|
**/
|
||||||
|
FUNCTION ANALYZE_VALIDATION_ERRORS(
|
||||||
|
pSourceFileReceivedKey IN NUMBER
|
||||||
|
) RETURN VARCHAR2;
|
||||||
|
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
-- PACKAGE VERSION MANAGEMENT FUNCTIONS
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_VERSION
|
||||||
|
* @desc Returns the current version number of the FILE_MANAGER package.
|
||||||
|
* Uses semantic versioning format (MAJOR.MINOR.PATCH).
|
||||||
|
* @example SELECT FILE_MANAGER.GET_VERSION() FROM DUAL;
|
||||||
|
* @ex_rslt 3.2.0
|
||||||
|
**/
|
||||||
|
FUNCTION GET_VERSION RETURN VARCHAR2;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_BUILD_INFO
|
||||||
|
* @desc Returns comprehensive build information including version, build date, and author.
|
||||||
|
* Uses centralized ENV_MANAGER.GET_PACKAGE_VERSION_INFO function.
|
||||||
|
* @example SELECT FILE_MANAGER.GET_BUILD_INFO() FROM DUAL;
|
||||||
|
* @ex_rslt Package: FILE_MANAGER
|
||||||
|
* Version: 3.2.0
|
||||||
|
* Build Date: 2025-10-22 16:30:00
|
||||||
|
* Author: Grzegorz Michalski
|
||||||
|
**/
|
||||||
|
FUNCTION GET_BUILD_INFO RETURN VARCHAR2;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_VERSION_HISTORY
|
||||||
|
* @desc Returns complete version history with all releases and changes.
|
||||||
|
* Uses centralized ENV_MANAGER.FORMAT_VERSION_HISTORY function.
|
||||||
|
* @example SELECT FILE_MANAGER.GET_VERSION_HISTORY() FROM DUAL;
|
||||||
|
* @ex_rslt FILE_MANAGER Version History:
|
||||||
|
* 3.2.0 (2025-10-22): Added package versioning system...
|
||||||
|
**/
|
||||||
|
FUNCTION GET_VERSION_HISTORY RETURN VARCHAR2;
|
||||||
|
|
||||||
|
END;
|
||||||
|
|
||||||
|
/
|
||||||
|
|
||||||
|
/
|
||||||
@@ -35,9 +35,12 @@ PROMPT Rollback steps:
|
|||||||
PROMPT 1. Rollback TRASH retention statuses
|
PROMPT 1. Rollback TRASH retention statuses
|
||||||
PROMPT 2. Revoke T_FILENAME privileges
|
PROMPT 2. Revoke T_FILENAME privileges
|
||||||
PROMPT 3. Remove validation trigger
|
PROMPT 3. Remove validation trigger
|
||||||
PROMPT 4. Drop all configuration columns (ARCHIVAL_STRATEGY, MINIMUM_AGE_MONTHS, ARCHIVE_ENABLED, KEEP_IN_TRASH)
|
PROMPT 4. Remove column comments (OPTIONAL - does not affect functionality)
|
||||||
PROMPT 5. Restore FILE_ARCHIVER package to v2.0.0
|
PROMPT 5. Revert threshold column renames (restore original naming)
|
||||||
PROMPT 6. Revert all archival strategies to THRESHOLD_BASED
|
PROMPT 6. Drop all configuration columns (ARCHIVAL_STRATEGY, MINIMUM_AGE_MONTHS, IS_ARCHIVE_ENABLED, IS_KEEP_IN_TRASH)
|
||||||
|
PROMPT 7. Restore FILE_ARCHIVER package to v2.0.0
|
||||||
|
PROMPT 8. Restore FILE_MANAGER package to v3.3.1
|
||||||
|
PROMPT 9. Revert all archival strategies to THRESHOLD_BASED
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT Timestamp:
|
PROMPT Timestamp:
|
||||||
SELECT TO_CHAR(SYSDATE, 'YYYY-MM-DD HH24:MI:SS') AS rollback_start FROM DUAL;
|
SELECT TO_CHAR(SYSDATE, 'YYYY-MM-DD HH24:MI:SS') AS rollback_start FROM DUAL;
|
||||||
@@ -56,38 +59,61 @@ WHENEVER SQLERROR CONTINUE
|
|||||||
|
|
||||||
-- Rollback steps (in reverse order)
|
-- Rollback steps (in reverse order)
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT Step 1/7: Rolling back TRASH retention statuses
|
PROMPT Step 1/9: Rolling back TRASH retention statuses
|
||||||
PROMPT ================================================
|
PROMPT ================================================
|
||||||
@@90_MARS_828_rollback_trash_retention_statuses.sql
|
@@90_MARS_828_rollback_trash_retention_statuses.sql
|
||||||
|
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT Step 2/7: Revoking T_FILENAME privileges from MRDS_LOADER
|
PROMPT Step 2/9: Revoking T_FILENAME privileges from MRDS_LOADER
|
||||||
PROMPT ==========================================================
|
PROMPT ==========================================================
|
||||||
@@95_MARS_828_rollback_grant_t_filename.sql
|
@@95_MARS_828_rollback_grant_t_filename.sql
|
||||||
|
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT Step 3/7: Dropping validation trigger
|
PROMPT Step 3/9: Dropping validation trigger
|
||||||
PROMPT ======================================
|
PROMPT ======================================
|
||||||
@@93_MARS_828_rollback_trigger.sql
|
@@93_MARS_828_rollback_trigger.sql
|
||||||
|
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT Step 4/7: Dropping all archival configuration columns
|
PROMPT Step 4/9 (OPTIONAL): Removing column comments
|
||||||
|
PROMPT ==============================================
|
||||||
|
PROMPT NOTE: This is optional - comments do not affect functionality
|
||||||
|
PROMPT Skipping column comments removal in standard rollback
|
||||||
|
PROMPT Execute 94b_MARS_828_rollback_column_comments.sql manually if needed
|
||||||
|
PROMPT
|
||||||
|
|
||||||
|
PROMPT
|
||||||
|
PROMPT Step 5/9: Reverting threshold column renames
|
||||||
|
PROMPT =============================================
|
||||||
|
@@94a_MARS_828_rollback_threshold_rename.sql
|
||||||
|
|
||||||
|
PROMPT
|
||||||
|
PROMPT Step 6/9: Dropping all archival configuration columns
|
||||||
PROMPT ======================================================
|
PROMPT ======================================================
|
||||||
@@94_MARS_828_rollback_columns.sql
|
@@94_MARS_828_rollback_columns.sql
|
||||||
|
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT Step 5/7: Restoring FILE_ARCHIVER Package Specification v2.0.0
|
PROMPT Step 7/9: Restoring FILE_ARCHIVER Package Specification v2.0.0
|
||||||
PROMPT ===============================================================
|
PROMPT ===============================================================
|
||||||
@@91_MARS_828_rollback_FILE_ARCHIVER_SPEC.sql
|
@@91_MARS_828_rollback_FILE_ARCHIVER_SPEC.sql
|
||||||
|
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT Step 6/7: Restoring FILE_ARCHIVER Package Body v2.0.0
|
PROMPT Step 8/11: Restoring FILE_ARCHIVER Package Body v2.0.0
|
||||||
PROMPT ======================================================
|
PROMPT =======================================================
|
||||||
@@92_MARS_828_rollback_FILE_ARCHIVER_BODY.sql
|
@@92_MARS_828_rollback_FILE_ARCHIVER_BODY.sql
|
||||||
|
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT Step 7/7: Verifying tracked packages
|
PROMPT Step 9/11: Restoring FILE_MANAGER Package Specification v3.3.1
|
||||||
PROMPT =====================================
|
PROMPT ===============================================================
|
||||||
|
@@97_MARS_828_rollback_FILE_MANAGER_SPEC.sql
|
||||||
|
|
||||||
|
PROMPT
|
||||||
|
PROMPT Step 10/11: Restoring FILE_MANAGER Package Body v3.3.1
|
||||||
|
PROMPT ======================================================
|
||||||
|
@@98_MARS_828_rollback_FILE_MANAGER_BODY.sql
|
||||||
|
|
||||||
|
PROMPT
|
||||||
|
PROMPT Step 11/11: Verifying tracked packages
|
||||||
|
PROMPT ======================================
|
||||||
@@verify_packages_version.sql
|
@@verify_packages_version.sql
|
||||||
|
|
||||||
-- Verify rollback
|
-- Verify rollback
|
||||||
@@ -97,9 +123,9 @@ PROMPT =========================================
|
|||||||
SELECT object_name, object_type, status, last_ddl_time
|
SELECT object_name, object_type, status, last_ddl_time
|
||||||
FROM all_objects
|
FROM all_objects
|
||||||
WHERE owner = 'CT_MRDS'
|
WHERE owner = 'CT_MRDS'
|
||||||
AND object_name = 'FILE_ARCHIVER'
|
AND object_name IN ('FILE_ARCHIVER', 'FILE_MANAGER')
|
||||||
AND object_type IN ('PACKAGE', 'PACKAGE BODY')
|
AND object_type IN ('PACKAGE', 'PACKAGE BODY')
|
||||||
ORDER BY object_type;
|
ORDER BY object_name, object_type;
|
||||||
|
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT ============================================================================
|
PROMPT ============================================================================
|
||||||
@@ -109,8 +135,9 @@ PROMPT Completion Time:
|
|||||||
SELECT TO_CHAR(SYSDATE, 'YYYY-MM-DD HH24:MI:SS') AS rollback_end FROM DUAL;
|
SELECT TO_CHAR(SYSDATE, 'YYYY-MM-DD HH24:MI:SS') AS rollback_end FROM DUAL;
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT Rollback Summary:
|
PROMPT Rollback Summary:
|
||||||
PROMPT - Package: CT_MRDS.FILE_ARCHIVER
|
PROMPT - Packages Rolled Back:
|
||||||
PROMPT - Restored Version: 2.0.0 (THRESHOLD_BASED archival only)
|
PROMPT * CT_MRDS.FILE_ARCHIVER to v2.0.0 (THRESHOLD_BASED archival only)
|
||||||
|
PROMPT * CT_MRDS.FILE_MANAGER to v3.3.1 (pre-MARS-828 threshold column compatibility)
|
||||||
PROMPT - Removed Features: CURRENT_MONTH_ONLY, MINIMUM_AGE_MONTHS, HYBRID strategies
|
PROMPT - Removed Features: CURRENT_MONTH_ONLY, MINIMUM_AGE_MONTHS, HYBRID strategies
|
||||||
PROMPT
|
PROMPT
|
||||||
PROMPT Log file: &_filename
|
PROMPT Log file: &_filename
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,637 @@
|
|||||||
|
create or replace PACKAGE CT_MRDS.FILE_MANAGER
|
||||||
|
AUTHID CURRENT_USER
|
||||||
|
AS
|
||||||
|
/**
|
||||||
|
* General comment for package: Please put comments for functions and procedures as shown in below example.
|
||||||
|
* It is a standard.
|
||||||
|
* The structure of comment is used by GET_PACKAGE_DOCUMENTATION function
|
||||||
|
* which returns documentation text for confluence page (to Copy-Paste it).
|
||||||
|
**/
|
||||||
|
|
||||||
|
-- Example comment:
|
||||||
|
/**
|
||||||
|
* @name EX_PROCEDURE_NAME
|
||||||
|
* @desc Procedure description
|
||||||
|
* @example select FILE_MANAGER.EX_PROCEDURE_NAME(pParameter => 129) from dual;
|
||||||
|
* @ex_rslt Example Result
|
||||||
|
**/
|
||||||
|
|
||||||
|
-- Package Version Information (Semantic Versioning: MAJOR.MINOR.PATCH)
|
||||||
|
PACKAGE_VERSION CONSTANT VARCHAR2(10) := '3.3.1';
|
||||||
|
PACKAGE_BUILD_DATE CONSTANT VARCHAR2(20) := '2025-11-27 14:00:00';
|
||||||
|
PACKAGE_AUTHOR CONSTANT VARCHAR2(100) := 'Grzegorz Michalski';
|
||||||
|
|
||||||
|
-- Version History (Latest changes first)
|
||||||
|
VERSION_HISTORY CONSTANT VARCHAR2(4000) :=
|
||||||
|
'3.3.1 (2025-11-27): MARS-1046 - Fixed ISO 8601 datetime format parsing with milliseconds and timezone (e.g., 2012-03-02T14:16:23.798+01:00)' || CHR(13)||CHR(10) ||
|
||||||
|
'3.3.0 (2025-11-26): MARS-1056 - Fixed VARCHAR2 definitions in GENERATE_EXTERNAL_TABLE_PARAMS to preserve CHAR/BYTE semantics from template tables' || CHR(13)||CHR(10) ||
|
||||||
|
'3.2.1 (2025-11-24): MARS-1049 - Added pEncoding parameter support for CSV character set specification' || CHR(13)||CHR(10) ||
|
||||||
|
'3.2.0 (2025-10-22): Added package versioning system using centralized ENV_MANAGER functions' || CHR(13)||CHR(10) ||
|
||||||
|
'3.1.0 (2025-10-20): Enhanced PROCESS_SOURCE_FILE with 6-step validation workflow' || CHR(13)||CHR(10) ||
|
||||||
|
'3.0.0 (2025-10-15): Separated export procedures into dedicated DATA_EXPORTER package' || CHR(13)||CHR(10) ||
|
||||||
|
'2.5.0 (2025-10-10): Added DELETE_SOURCE_CASCADE for safe configuration removal' || CHR(13)||CHR(10) ||
|
||||||
|
'2.0.0 (2025-09-25): Added official path patterns support (INBOX 3-level, ODS 2-level, ARCHIVE 2-level)' || CHR(13)||CHR(10) ||
|
||||||
|
'1.0.0 (2025-09-01): Initial release with file processing and validation capabilities';
|
||||||
|
|
||||||
|
TYPE tSourceFileReceived IS RECORD
|
||||||
|
(
|
||||||
|
A_SOURCE_FILE_RECEIVED_KEY CT_MRDS.A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY%TYPE,
|
||||||
|
A_SOURCE_FILE_CONFIG_KEY CT_MRDS.A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_CONFIG_KEY%TYPE,
|
||||||
|
SOURCE_FILE_PREFIX_INBOX VARCHAR2(430),
|
||||||
|
SOURCE_FILE_PREFIX_ODS VARCHAR2(430),
|
||||||
|
SOURCE_FILE_PREFIX_QUARANTINE VARCHAR2(430),
|
||||||
|
SOURCE_FILE_PREFIX_ARCHIVE VARCHAR2(430),
|
||||||
|
SOURCE_FILE_NAME CT_MRDS.A_SOURCE_FILE_RECEIVED.SOURCE_FILE_NAME%TYPE,
|
||||||
|
RECEPTION_DATE CT_MRDS.A_SOURCE_FILE_RECEIVED.RECEPTION_DATE%TYPE,
|
||||||
|
PROCESSING_STATUS CT_MRDS.A_SOURCE_FILE_RECEIVED.PROCESSING_STATUS%TYPE,
|
||||||
|
EXTERNAL_TABLE_NAME CT_MRDS.A_SOURCE_FILE_RECEIVED.EXTERNAL_TABLE_NAME%TYPE
|
||||||
|
);
|
||||||
|
|
||||||
|
cgBL CONSTANT VARCHAR2(2) := CHR(13)||CHR(10);
|
||||||
|
vgSourceFileConfigKey PLS_INTEGER;
|
||||||
|
vgMsgTmp VARCHAR2(32000);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_SOURCE_FILE_CONFIG
|
||||||
|
* @desc Get source file type by matching the source file name against source file type naming patterns
|
||||||
|
* or by specifying the id of a received source file.
|
||||||
|
* @example ...
|
||||||
|
* @ex_rslt "CT_MRDS.A_SOURCE_FILE_CONFIG%ROWTYPE"
|
||||||
|
**/
|
||||||
|
FUNCTION GET_SOURCE_FILE_CONFIG(pFileUri IN VARCHAR2 DEFAULT NULL
|
||||||
|
, pSourceFileReceivedKey IN NUMBER DEFAULT NULL
|
||||||
|
, pSourceFileConfigKey IN NUMBER DEFAULT NULL)
|
||||||
|
RETURN CT_MRDS.A_SOURCE_FILE_CONFIG%ROWTYPE;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name REGISTER_SOURCE_FILE_RECEIVED
|
||||||
|
* @desc Register a newly received source file in A_SOURCE_FILE_RECEIVED table.
|
||||||
|
* This overload automatically determines source file type from the file name.
|
||||||
|
* It returns the value of A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY column for newly added record.
|
||||||
|
* @example vSourceFileReceivedKey := FILE_MANAGER.REGISTER_SOURCE_FILE_RECEIVED(pSourceFileReceivedName => 'INBOX/C2D/UC_DISSEM/UC_NMA_DISSEM/UC_NMA_DISSEM-277740.csv');
|
||||||
|
* @ex_rslt 3245
|
||||||
|
**/
|
||||||
|
FUNCTION REGISTER_SOURCE_FILE_RECEIVED (
|
||||||
|
pSourceFileReceivedName IN VARCHAR2
|
||||||
|
)
|
||||||
|
RETURN PLS_INTEGER;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name REGISTER_SOURCE_FILE_RECEIVED
|
||||||
|
* @desc Register a new source file in A_SOURCE_FILE_RECEIVED table based on pSourceFileReceivedName and pSourceFileConfig.
|
||||||
|
* Then it returns the value of A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY column for newly added record.
|
||||||
|
* @example vSourceFileReceivedKey := FILE_MANAGER.REGISTER_SOURCE_FILE_RECEIVED(
|
||||||
|
* pSourceFileReceivedName => 'INBOX/C2D/UC_DISSEM/UC_NMA_DISSEM/UC_NMA_DISSEM-277740.csv'
|
||||||
|
* ,pSourceFileConfig => ...A_SOURCE_FILE_CONFIG%ROWTYPE... );
|
||||||
|
* @ex_rslt 3245
|
||||||
|
**/
|
||||||
|
FUNCTION REGISTER_SOURCE_FILE_RECEIVED (
|
||||||
|
pSourceFileReceivedName IN VARCHAR2,
|
||||||
|
pSourceFileConfig IN CT_MRDS.A_SOURCE_FILE_CONFIG%ROWTYPE
|
||||||
|
)
|
||||||
|
RETURN PLS_INTEGER;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name SET_SOURCE_FILE_RECEIVED_STATUS
|
||||||
|
* @desc Set status of file in A_SOURCE_FILE_RECEIVED table - PROCESSING_STATUS column
|
||||||
|
* based on A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY
|
||||||
|
* and provided value of pStatus parameter
|
||||||
|
* @example exec FILE_MANAGER.SET_SOURCE_FILE_RECEIVED_STATUS(pSourceFileReceivedKey => 377, pStatus => 'READY_FOR_INGESTION');
|
||||||
|
**/
|
||||||
|
PROCEDURE SET_SOURCE_FILE_RECEIVED_STATUS(
|
||||||
|
pSourceFileReceivedKey IN PLS_INTEGER,
|
||||||
|
pStatus IN VARCHAR2
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_EXTERNAL_TABLE_COLUMNS
|
||||||
|
* @desc Function used to get string with all table columns definitions based on pTargetTableTemplate "TEMPLATE TABLE" name.
|
||||||
|
* It is used for creating an "EXTERNAL TABLE" via the CREATE_EXTERNAL_TABLE procedure.
|
||||||
|
* @example select FILE_MANAGER.GET_EXTERNAL_TABLE_COLUMNS(pTargetTableTemplate => 'CT_ET_TEMPLATES.LM_STANDING_FACILITIES_HEADER') from dual;
|
||||||
|
* @ex_rslt "A_KEY" NUMBER(38,0) NOT NULL ENABLE,
|
||||||
|
* "A_WORKFLOW_HISTORY_KEY" NUMBER(38,0) NOT NULL ENABLE,
|
||||||
|
* "REV_NUMBER" NUMBER(28,0),
|
||||||
|
* "REF_DATE" DATE,
|
||||||
|
* "FREE_TEXT" VARCHAR2(1000 CHAR),
|
||||||
|
* "MLF_BS_TOTAL" NUMBER(28,10),
|
||||||
|
* "DF_BS_TOTAL" NUMBER(28,10),
|
||||||
|
* "MLF_SF_TOTAL" NUMBER(28,10),
|
||||||
|
* "DF_SF_TOTAL" NUMBER(28,10)
|
||||||
|
**/
|
||||||
|
FUNCTION GET_EXTERNAL_TABLE_COLUMNS (
|
||||||
|
pTargetTableTemplate IN VARCHAR2
|
||||||
|
)
|
||||||
|
RETURN CLOB;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name CREATE_EXTERNAL_TABLE
|
||||||
|
* @desc A wrapper procedure for DBMS_CLOUD.CREATE_EXTERNAL_TABLE which creates External Table
|
||||||
|
* MARS-1049: Added pEncoding parameter for CSV character set specification
|
||||||
|
* @param pEncoding - Character set encoding for CSV files (e.g., 'UTF8', 'WE8MSWIN1252')
|
||||||
|
* If provided, adds CHARACTERSET clause to external table definition
|
||||||
|
* @example
|
||||||
|
* begin
|
||||||
|
* FILE_MANAGER.CREATE_EXTERNAL_TABLE(
|
||||||
|
* pTableName => 'STANDING_FACILITIES_HEADER',
|
||||||
|
* pTemplateTableName => 'CT_ET_TEMPLATES.LM_STANDING_FACILITIES_HEADER',
|
||||||
|
* pPrefix => 'ODS/LM/STANDING_FACILITIES_HEADER/',
|
||||||
|
* pBucketUri => 'https://objectstorage.eu-frankfurt-1.oraclecloud.com/n/frcnomajoc7v/b/mrds_data_tst/o/',
|
||||||
|
* pFileName => NULL,
|
||||||
|
* pDelimiter => ',',
|
||||||
|
* pEncoding => 'UTF8'
|
||||||
|
* );
|
||||||
|
* end;
|
||||||
|
**/
|
||||||
|
PROCEDURE CREATE_EXTERNAL_TABLE (
|
||||||
|
pTableName IN VARCHAR2,
|
||||||
|
pTemplateTableName IN VARCHAR2,
|
||||||
|
pPrefix IN VARCHAR2,
|
||||||
|
pBucketUri IN VARCHAR2 DEFAULT ENV_MANAGER.gvInboxBucketUri,
|
||||||
|
pFileName IN VARCHAR2 DEFAULT NULL,
|
||||||
|
pDelimiter IN VARCHAR2 DEFAULT ',',
|
||||||
|
pEncoding IN VARCHAR2 DEFAULT NULL -- MARS-1049: NEW PARAMETER
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name CREATE_EXTERNAL_TABLE
|
||||||
|
* @desc Creates External Table for single file provided by
|
||||||
|
* pSourceFileReceivedKey parameter (A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY)
|
||||||
|
* @example exec FILE_MANAGER.CREATE_EXTERNAL_TABLE(pSourceFileReceivedKey => 377);
|
||||||
|
**/
|
||||||
|
PROCEDURE CREATE_EXTERNAL_TABLE (
|
||||||
|
pSourceFileReceivedKey IN NUMBER
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name VALIDATE_SOURCE_FILE_RECEIVED
|
||||||
|
* @desc A wrapper procedure for DBMS_CLOUD.VALIDATE_EXTERNAL_TABLE
|
||||||
|
* It validates an External Table built upon a single file
|
||||||
|
* provided by pSourceFileReceivedKey parameter (A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY)
|
||||||
|
* @example exec FILE_MANAGER.VALIDATE_SOURCE_FILE_RECEIVED(pSourceFileReceivedKey => 377);
|
||||||
|
**/
|
||||||
|
PROCEDURE VALIDATE_SOURCE_FILE_RECEIVED
|
||||||
|
(
|
||||||
|
pSourceFileReceivedKey IN NUMBER
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name VALIDATE_EXTERNAL_TABLE
|
||||||
|
* @desc A wrapper function for DBMS_CLOUD.VALIDATE_EXTERNAL_TABLE.
|
||||||
|
* It validates External Table provided by parameter pTableName.
|
||||||
|
* It returns: PASSED or FAILED.
|
||||||
|
* @example
|
||||||
|
* declare
|
||||||
|
* vStatus VARCHAR2(100);
|
||||||
|
* begin
|
||||||
|
* vStatus := FILE_MANAGER.VALIDATE_EXTERNAL_TABLE(pTableName => 'STANDING_FACILITIES_HEADER');
|
||||||
|
* DBMS_OUTPUT.PUT_LINE('vStatus = '||vStatus);
|
||||||
|
* end;
|
||||||
|
*
|
||||||
|
* @ex_rslt FAILED
|
||||||
|
**/
|
||||||
|
FUNCTION VALIDATE_EXTERNAL_TABLE(pTableName IN VARCHAR2)
|
||||||
|
RETURN VARCHAR2;
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name S_VALIDATE_EXTERNAL_TABLE
|
||||||
|
* @desc A function which checks if a SELECT query returns any rows.
|
||||||
|
* It tries to select from the External Table provided by parameter pTableName.
|
||||||
|
* It returns: PASSED or FAILED.
|
||||||
|
* @example
|
||||||
|
* declare
|
||||||
|
* vStatus VARCHAR2(100);
|
||||||
|
* begin
|
||||||
|
* vStatus := FILE_MANAGER.S_VALIDATE_EXTERNAL_TABLE(pTableName => 'STANDING_FACILITIES_HEADER');
|
||||||
|
* DBMS_OUTPUT.PUT_LINE('vStatus = '||vStatus);
|
||||||
|
* end;
|
||||||
|
*
|
||||||
|
* @ex_rslt PASSED
|
||||||
|
**/
|
||||||
|
FUNCTION S_VALIDATE_EXTERNAL_TABLE(pTableName IN VARCHAR2)
|
||||||
|
RETURN VARCHAR2;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name DROP_EXTERNAL_TABLE
|
||||||
|
* @desc It drops External Table for single file provided by
|
||||||
|
* pSourceFileReceivedKey parameter (A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY)
|
||||||
|
* @example exec FILE_MANAGER.DROP_EXTERNAL_TABLE(pSourceFileReceivedKey => 377);
|
||||||
|
**/
|
||||||
|
PROCEDURE DROP_EXTERNAL_TABLE (
|
||||||
|
pSourceFileReceivedKey IN NUMBER
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name COPY_FILE
|
||||||
|
* @desc It copies file provided by
|
||||||
|
* pSourceFileReceivedKey parameter (A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY)
|
||||||
|
* into destination provided by pDestination parameter.
|
||||||
|
* pDestination parameter allowed values are: 'ODS'
|
||||||
|
* @example exec FILE_MANAGER.COPY_FILE(pSourceFileReceivedKey => 377, pDestination => 'ODS');
|
||||||
|
**/
|
||||||
|
PROCEDURE COPY_FILE(
|
||||||
|
pSourceFileReceivedKey IN NUMBER,
|
||||||
|
pDestination IN VARCHAR2
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name MOVE_FILE
|
||||||
|
* @desc It moves file provided by
|
||||||
|
* pSourceFileReceivedKey parameter (A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY)
|
||||||
|
* into destination provided by pDestination parameter.
|
||||||
|
* pDestination parameter allowed values are: 'ODS', 'QUARANTINE'
|
||||||
|
* @example exec FILE_MANAGER.MOVE_FILE(pSourceFileReceivedKey => 377, pDestination => 'ODS');
|
||||||
|
**/
|
||||||
|
PROCEDURE MOVE_FILE(
|
||||||
|
pSourceFileReceivedKey IN NUMBER,
|
||||||
|
pDestination IN VARCHAR2
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name DELETE_FOLDER_CONTENTS
|
||||||
|
* @desc It deletes all files from specified folder in the cloud storage.
|
||||||
|
* The procedure lists all objects in the specified folder prefix and deletes them one by one.
|
||||||
|
* pBucketArea parameter specifies which bucket to use: 'INBOX', 'DATA', 'ARCHIVE'
|
||||||
|
* pFolderPrefix parameter specifies the folder path within the bucket (e.g., 'C2D/UC_DISSEM/UC_NMA_DISSEM/')
|
||||||
|
* @example exec FILE_MANAGER.DELETE_FOLDER_CONTENTS(pBucketArea => 'INBOX', pFolderPrefix => 'C2D/UC_DISSEM/UC_NMA_DISSEM/');
|
||||||
|
**/
|
||||||
|
PROCEDURE DELETE_FOLDER_CONTENTS(
|
||||||
|
pBucketArea IN VARCHAR2,
|
||||||
|
pFolderPrefix IN VARCHAR2
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name PROCESS_SOURCE_FILE
|
||||||
|
* @desc It processes the file provided by the pSourceFileReceivedName parameter.
|
||||||
|
* Umbrella procedure that calls:
|
||||||
|
* - REGISTER_SOURCE_FILE_RECEIVED;
|
||||||
|
* - CREATE_EXTERNAL_TABLE;
|
||||||
|
* - VALIDATE_SOURCE_FILE_RECEIVED;
|
||||||
|
* - DROP_EXTERNAL_TABLE;
|
||||||
|
* - MOVE_FILE;
|
||||||
|
* @example exec FILE_MANAGER.PROCESS_SOURCE_FILE(pSourceFileReceivedName => 'INBOX/C2D/UC_DISSEM/UC_NMA_DISSEM/UC_NMA_DISSEM-277740.csv');
|
||||||
|
**/
|
||||||
|
PROCEDURE PROCESS_SOURCE_FILE(pSourceFileReceivedName IN VARCHAR2)
|
||||||
|
;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name PROCESS_SOURCE_FILE
|
||||||
|
* @desc It processes the file provided by the pSourceFileReceivedName parameter and returns the processing result value.
|
||||||
|
* It returns (success/failure) => 0 / -(value).
|
||||||
|
* Umbrella function that calls the PROCESS_SOURCE_FILE procedure.
|
||||||
|
* @example
|
||||||
|
* declare
|
||||||
|
* vResult PLS_INTEGER;
|
||||||
|
* begin
|
||||||
|
* vResult := CT_MRDS.FILE_MANAGER.PROCESS_SOURCE_FILE(PSOURCEFILERECEIVEDNAME => 'INBOX/C2D/UC_DISSEM/UC_NMA_DISSEM/UC_NMA_DISSEM-277740.csv');
|
||||||
|
* DBMS_OUTPUT.PUT_LINE('vResult = ' || vResult);
|
||||||
|
* end;
|
||||||
|
* @ex_rslt 0
|
||||||
|
* -20021
|
||||||
|
**/
|
||||||
|
FUNCTION PROCESS_SOURCE_FILE(pSourceFileReceivedName IN VARCHAR2)
|
||||||
|
RETURN PLS_INTEGER;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_DATE_FORMAT
|
||||||
|
* @desc Returns date format for specified template table name and column name.
|
||||||
|
* Date is taken from configuration A_COLUMN_DATE_FORMAT table.
|
||||||
|
* @example select FILE_MANAGER.GET_DATE_FORMAT(
|
||||||
|
* pTemplateTableName => 'STANDING_FACILITIES_HEADER',
|
||||||
|
* pColumnName => 'SNAPSHOT_DATE')
|
||||||
|
* from dual;
|
||||||
|
* @ex_rslt DD/MM/YYYY HH24:MI:SS
|
||||||
|
**/
|
||||||
|
FUNCTION GET_DATE_FORMAT(
|
||||||
|
pTemplateTableName IN VARCHAR2,
|
||||||
|
pColumnName IN VARCHAR2
|
||||||
|
) RETURN VARCHAR2;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GENERATE_EXTERNAL_TABLE_PARAMS
|
||||||
|
* @desc It builds two strings: pColumnList and pFieldList for specified Template Table name, by parameter: pTemplateTableName.
|
||||||
|
* @example
|
||||||
|
* declare
|
||||||
|
* vColumnList CLOB;
|
||||||
|
* vFieldList CLOB;
|
||||||
|
* begin
|
||||||
|
* FILE_MANAGER.GENERATE_EXTERNAL_TABLE_PARAMS (
|
||||||
|
* pTemplateTableName => 'CT_ET_TEMPLATES.LM_STANDING_FACILITIES_HEADER'
|
||||||
|
* ,pColumnList => vColumnList
|
||||||
|
* ,pFieldList => vFieldList
|
||||||
|
* );
|
||||||
|
* DBMS_OUTPUT.PUT_LINE('vColumnList = '||vColumnList);
|
||||||
|
* DBMS_OUTPUT.PUT_LINE('vFieldList = '||vFieldList);
|
||||||
|
* end;
|
||||||
|
* /
|
||||||
|
**/
|
||||||
|
PROCEDURE GENERATE_EXTERNAL_TABLE_PARAMS (
|
||||||
|
pTemplateTableName IN VARCHAR2,
|
||||||
|
pColumnList OUT CLOB,
|
||||||
|
pFieldList OUT CLOB
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name ADD_SOURCE
|
||||||
|
* @desc Insert a new record to A_SOURCE table.
|
||||||
|
* pSourceKey is a PRIMARY KEY value.
|
||||||
|
**/
|
||||||
|
PROCEDURE ADD_SOURCE (
|
||||||
|
pSourceKey IN CT_MRDS.A_SOURCE.A_SOURCE_KEY%TYPE,
|
||||||
|
pSourceName IN CT_MRDS.A_SOURCE.SOURCE_NAME%TYPE
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name DELETE_SOURCE_CASCADE
|
||||||
|
* @desc Safely deletes a SOURCE specified by pSourceKey parameter from A_SOURCE table and all dependent tables:
|
||||||
|
* - A_SOURCE_FILE_CONFIG
|
||||||
|
* - A_SOURCE_FILE_RECEIVED
|
||||||
|
* - A_COLUMN_DATE_FORMAT (only if template table is not shared with other source systems)
|
||||||
|
* The procedure checks if template tables are shared before deleting date format configurations.
|
||||||
|
* If a template table is used by multiple source systems, date formats are preserved.
|
||||||
|
* @example CALL CT_MRDS.FILE_MANAGER.DELETE_SOURCE_CASCADE(pSourceKey => 'TEST_SYS');
|
||||||
|
**/
|
||||||
|
PROCEDURE DELETE_SOURCE_CASCADE (
|
||||||
|
pSourceKey IN CT_MRDS.A_SOURCE.A_SOURCE_KEY%TYPE
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_CONTAINER_SOURCE_FILE_CONFIG_KEY
|
||||||
|
* @desc For specified parameter pSourceFileId (A_SOURCE_FILE_CONFIG.SOURCE_FILE_ID)
|
||||||
|
* it returns A_SOURCE_FILE_CONFIG.A_SOURCE_FILE_CONFIG_KEY for related CONTAINER record.
|
||||||
|
* @example select FILE_MANAGER.GET_CONTAINER_SOURCE_FILE_CONFIG_KEY(
|
||||||
|
* pSourceFileId => 'UC_DISSEM')
|
||||||
|
* from dual;
|
||||||
|
* @ex_rslt 126
|
||||||
|
**/
|
||||||
|
FUNCTION GET_CONTAINER_SOURCE_FILE_CONFIG_KEY (
|
||||||
|
pSourceFileId IN CT_MRDS.A_SOURCE_FILE_CONFIG.SOURCE_FILE_ID%TYPE
|
||||||
|
) RETURN PLS_INTEGER;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_SOURCE_FILE_CONFIG_KEY
|
||||||
|
* @desc For specified input parameters,
|
||||||
|
* it returns A_SOURCE_FILE_CONFIG.A_SOURCE_FILE_CONFIG_KEY.
|
||||||
|
* @example select FILE_MANAGER.GET_SOURCE_FILE_CONFIG_KEY (
|
||||||
|
* pSourceFileType => 'INPUT'
|
||||||
|
* ,pSourceFileId => 'UC_DISSEM'
|
||||||
|
* ,pTableId => 'UC_NMA_DISSEM')
|
||||||
|
* from dual;
|
||||||
|
* @ex_rslt 126
|
||||||
|
**/
|
||||||
|
FUNCTION GET_SOURCE_FILE_CONFIG_KEY (
|
||||||
|
pSourceFileType IN CT_MRDS.A_SOURCE_FILE_CONFIG.SOURCE_FILE_TYPE%TYPE DEFAULT 'INPUT'
|
||||||
|
,pSourceFileId IN CT_MRDS.A_SOURCE_FILE_CONFIG.SOURCE_FILE_ID%TYPE
|
||||||
|
,pTableId IN CT_MRDS.A_SOURCE_FILE_CONFIG.TABLE_ID%TYPE
|
||||||
|
) RETURN PLS_INTEGER;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name ADD_SOURCE_FILE_CONFIG
|
||||||
|
* @desc Insert a new record to A_SOURCE_FILE_CONFIG table.
|
||||||
|
* MARS-1049: Added pEncoding parameter for CSV character set specification.
|
||||||
|
* @param pEncoding - Character set encoding for CSV files (e.g., 'UTF8', 'WE8MSWIN1252', 'EE8ISO8859P2')
|
||||||
|
* If NULL, no CHARACTERSET clause is added to external table definitions
|
||||||
|
* @example CALL CT_MRDS.FILE_MANAGER.ADD_SOURCE_FILE_CONFIG(
|
||||||
|
* pSourceKey => 'C2D', pSourceFileType => 'INPUT',
|
||||||
|
* pSourceFileId => 'UC_DISSEM', pTableId => 'METADATA_LOADS',
|
||||||
|
* pTemplateTableName => 'CT_ET_TEMPLATES.C2D_A_UC_DISSEM_METADATA_LOADS',
|
||||||
|
* pEncoding => 'UTF8'
|
||||||
|
* );
|
||||||
|
**/
|
||||||
|
PROCEDURE ADD_SOURCE_FILE_CONFIG (
|
||||||
|
pSourceKey IN CT_MRDS.A_SOURCE_FILE_CONFIG.A_SOURCE_KEY%TYPE
|
||||||
|
,pSourceFileType IN CT_MRDS.A_SOURCE_FILE_CONFIG.SOURCE_FILE_TYPE%TYPE
|
||||||
|
,pSourceFileId IN CT_MRDS.A_SOURCE_FILE_CONFIG.SOURCE_FILE_ID%TYPE
|
||||||
|
,pSourceFileDesc IN CT_MRDS.A_SOURCE_FILE_CONFIG.SOURCE_FILE_DESC%TYPE
|
||||||
|
,pSourceFileNamePattern IN CT_MRDS.A_SOURCE_FILE_CONFIG.SOURCE_FILE_NAME_PATTERN%TYPE
|
||||||
|
,pTableId IN CT_MRDS.A_SOURCE_FILE_CONFIG.TABLE_ID%TYPE DEFAULT NULL
|
||||||
|
,pTemplateTableName IN CT_MRDS.A_SOURCE_FILE_CONFIG.TEMPLATE_TABLE_NAME%TYPE DEFAULT NULL
|
||||||
|
,pContainerFileKey IN CT_MRDS.A_SOURCE_FILE_CONFIG.CONTAINER_FILE_KEY%TYPE DEFAULT NULL
|
||||||
|
,pEncoding IN CT_MRDS.A_SOURCE_FILE_CONFIG.ENCODING%TYPE DEFAULT NULL -- MARS-1049: NOWY PARAMETR
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name ADD_COLUMN_DATE_FORMAT
|
||||||
|
* @desc Insert a new record to A_COLUMN_DATE_FORMAT table.
|
||||||
|
**/
|
||||||
|
PROCEDURE ADD_COLUMN_DATE_FORMAT (
|
||||||
|
pTemplateTableName IN CT_MRDS.A_COLUMN_DATE_FORMAT.TEMPLATE_TABLE_NAME%TYPE
|
||||||
|
,pColumnName IN CT_MRDS.A_COLUMN_DATE_FORMAT.COLUMN_NAME%TYPE
|
||||||
|
,pDateFormat IN CT_MRDS.A_COLUMN_DATE_FORMAT.DATE_FORMAT%TYPE
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_BUCKET_URI
|
||||||
|
* @desc Function used to get string with bucket http url.
|
||||||
|
* Possible input values for pBucketArea are: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
|
||||||
|
* @example select FILE_MANAGER.GET_BUCKET_URI(pBucketArea => 'ODS') from dual;
|
||||||
|
* @ex_rslt https://objectstorage.eu-frankfurt-1.oraclecloud.com/n/frcnomajoc7v/b/mrds_data_tst/o/
|
||||||
|
**/
|
||||||
|
FUNCTION GET_BUCKET_URI(pBucketArea VARCHAR2)
|
||||||
|
RETURN VARCHAR2;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_DET_SOURCE_FILE_CONFIG_INFO
|
||||||
|
* @desc Function returns details about A_SOURCE_FILE_CONFIG record
|
||||||
|
* for specified pSourceFileConfigKey (A_SOURCE_FILE_CONFIG.A_SOURCE_FILE_CONFIG_KEY).
|
||||||
|
* If pIncludeContainerInfo is <> 0 it returns additional info about related Container config record (A_SOURCE_FILE_CONFIG)
|
||||||
|
* If pIncludeColumnFormatInfo is <> 0 it returns additional info about related ColumnFormat config record (A_COLUMN_DATE_FORMAT)
|
||||||
|
* @example select FILE_MANAGER.GET_DET_SOURCE_FILE_CONFIG_INFO (
|
||||||
|
* pSourceFileConfigKey => 128
|
||||||
|
* ,pIncludeContainerInfo => 1
|
||||||
|
* ,pIncludeColumnFormatInfo => 1
|
||||||
|
* ) from dual;
|
||||||
|
* @ex_rslt
|
||||||
|
* Details about File Configuration:
|
||||||
|
* --------------------------------
|
||||||
|
* A_SOURCE_FILE_CONFIG_KEY = 128
|
||||||
|
* A_SOURCE_KEY = C2D
|
||||||
|
* ...
|
||||||
|
* --------------------------------
|
||||||
|
*
|
||||||
|
* Details about related Container Config:
|
||||||
|
* --------------------------------
|
||||||
|
* A_SOURCE_FILE_CONFIG_KEY = 126
|
||||||
|
* A_SOURCE_KEY = C2D
|
||||||
|
* ...
|
||||||
|
* --------------------------------
|
||||||
|
*
|
||||||
|
* Column Date Format config entries:
|
||||||
|
* --------------------------------
|
||||||
|
* TEMPLATE_TABLE_NAME = CT_ET_TEMPLATES.C2D_UC_MA_DISSEM
|
||||||
|
* ...
|
||||||
|
* --------------------------------
|
||||||
|
**/
|
||||||
|
FUNCTION GET_DET_SOURCE_FILE_CONFIG_INFO (
|
||||||
|
pSourceFileConfigKey IN CT_MRDS.A_SOURCE_FILE_CONFIG.A_SOURCE_FILE_CONFIG_KEY%TYPE
|
||||||
|
,pIncludeContainerInfo IN PLS_INTEGER DEFAULT 1
|
||||||
|
,pIncludeColumnFormatInfo IN PLS_INTEGER DEFAULT 1
|
||||||
|
) RETURN VARCHAR2;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_DET_SOURCE_FILE_RECEIVED_INFO
|
||||||
|
* @desc Function returns details about A_SOURCE_FILE_RECEIVED record
|
||||||
|
* for specified pSourceFileReceivedKey (A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY).
|
||||||
|
* If pIncludeConfigInfo is <> 0 it returns additional info about related Container config record (A_SOURCE_FILE_CONFIG)
|
||||||
|
* If pIncludeContainerInfo is <> 0 it returns additional info about related Container config record (A_SOURCE_FILE_CONFIG)
|
||||||
|
* If pIncludeColumnFormatInfo is <> 0 it returns additional info about related ColumnFormat config record (A_COLUMN_DATE_FORMAT)
|
||||||
|
* @example select FILE_MANAGER.GET_DET_SOURCE_FILE_RECEIVED_INFO (
|
||||||
|
* pSourceFileReceivedKey => 377
|
||||||
|
* ,pIncludeConfigInfo => 1
|
||||||
|
* ,pIncludeContainerInfo => 1
|
||||||
|
* ,pIncludeColumnFormatInfo => 1
|
||||||
|
* ) from dual;
|
||||||
|
*
|
||||||
|
**/
|
||||||
|
FUNCTION GET_DET_SOURCE_FILE_RECEIVED_INFO (
|
||||||
|
pSourceFileReceivedKey IN CT_MRDS.A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY%TYPE
|
||||||
|
,pIncludeConfigInfo IN PLS_INTEGER DEFAULT 1
|
||||||
|
,pIncludeContainerInfo IN PLS_INTEGER DEFAULT 1
|
||||||
|
,pIncludeColumnFormatInfo IN PLS_INTEGER DEFAULT 1
|
||||||
|
) RETURN VARCHAR2;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_DET_USER_LOAD_OPERATIONS
|
||||||
|
* @desc Function returns details from USER_LOAD_OPERATIONS table
|
||||||
|
* for specified pOperationId.
|
||||||
|
* @example select FILE_MANAGER.GET_DET_USER_LOAD_OPERATIONS (pOperationId => 3608) from dual;
|
||||||
|
* @ex_rslt
|
||||||
|
* Details about USER_LOAD_OPERATIONS where ID = 3608
|
||||||
|
* --------------------------------
|
||||||
|
* ID = 3608
|
||||||
|
* TYPE = VALIDATE
|
||||||
|
* SID = 31260
|
||||||
|
* SERIAL# = 52915
|
||||||
|
* START_TIME = 2025-05-20 10.08.24.436983 EUROPE/BELGRADE
|
||||||
|
* UPDATE_TIME = 2025-05-20 10.08.24.458643 EUROPE/BELGRADE
|
||||||
|
* STATUS = FAILED
|
||||||
|
* OWNER_NAME = CT_MRDS
|
||||||
|
* TABLE_NAME = STANDING_FACILITIES_HEADER
|
||||||
|
* PARTITION_NAME =
|
||||||
|
* SUBPARTITION_NAME =
|
||||||
|
* FILE_URI_LIST =
|
||||||
|
* ROWS_LOADED =
|
||||||
|
* LOGFILE_TABLE = VALIDATE$3608_LOG
|
||||||
|
* BADFILE_TABLE = VALIDATE$3608_BAD
|
||||||
|
* STATUS_TABLE =
|
||||||
|
* TEMPEXT_TABLE =
|
||||||
|
* CREDENTIAL_NAME =
|
||||||
|
* EXPIRATION_TIME = 2025-05-22 10.08.24.436983000 EUROPE/BELGRADE
|
||||||
|
* --------------------------------
|
||||||
|
**/
|
||||||
|
FUNCTION GET_DET_USER_LOAD_OPERATIONS (
|
||||||
|
pOperationId PLS_INTEGER
|
||||||
|
) RETURN VARCHAR2;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name ANALYZE_VALIDATION_ERRORS
|
||||||
|
* @desc Wrapper function that analyzes validation errors for a source file using its received key.
|
||||||
|
* Automatically derives template schema, table name, CSV URI and validation log table
|
||||||
|
* from file metadata and calls ENV_MANAGER.ANALYZE_VALIDATION_ERRORS.
|
||||||
|
* @example SELECT FILE_MANAGER.ANALYZE_VALIDATION_ERRORS(63) FROM DUAL;
|
||||||
|
* @ex_rslt Detailed validation analysis report with column mismatches and solutions
|
||||||
|
**/
|
||||||
|
FUNCTION ANALYZE_VALIDATION_ERRORS(
|
||||||
|
pSourceFileReceivedKey IN NUMBER
|
||||||
|
) RETURN VARCHAR2;
|
||||||
|
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
-- PACKAGE VERSION MANAGEMENT FUNCTIONS
|
||||||
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_VERSION
|
||||||
|
* @desc Returns the current version number of the FILE_MANAGER package.
|
||||||
|
* Uses semantic versioning format (MAJOR.MINOR.PATCH).
|
||||||
|
* @example SELECT FILE_MANAGER.GET_VERSION() FROM DUAL;
|
||||||
|
* @ex_rslt 3.2.0
|
||||||
|
**/
|
||||||
|
FUNCTION GET_VERSION RETURN VARCHAR2;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_BUILD_INFO
|
||||||
|
* @desc Returns comprehensive build information including version, build date, and author.
|
||||||
|
* Uses centralized ENV_MANAGER.GET_PACKAGE_VERSION_INFO function.
|
||||||
|
* @example SELECT FILE_MANAGER.GET_BUILD_INFO() FROM DUAL;
|
||||||
|
* @ex_rslt Package: FILE_MANAGER
|
||||||
|
* Version: 3.2.0
|
||||||
|
* Build Date: 2025-10-22 16:30:00
|
||||||
|
* Author: Grzegorz Michalski
|
||||||
|
**/
|
||||||
|
FUNCTION GET_BUILD_INFO RETURN VARCHAR2;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name GET_VERSION_HISTORY
|
||||||
|
* @desc Returns complete version history with all releases and changes.
|
||||||
|
* Uses centralized ENV_MANAGER.FORMAT_VERSION_HISTORY function.
|
||||||
|
* @example SELECT FILE_MANAGER.GET_VERSION_HISTORY() FROM DUAL;
|
||||||
|
* @ex_rslt FILE_MANAGER Version History:
|
||||||
|
* 3.2.0 (2025-10-22): Added package versioning system...
|
||||||
|
**/
|
||||||
|
FUNCTION GET_VERSION_HISTORY RETURN VARCHAR2;
|
||||||
|
|
||||||
|
END;
|
||||||
|
|
||||||
|
/
|
||||||
|
|
||||||
|
/
|
||||||
@@ -29,7 +29,8 @@ DECLARE
|
|||||||
-- Format: 'SCHEMA.PACKAGE_NAME'
|
-- Format: 'SCHEMA.PACKAGE_NAME'
|
||||||
-- ===================================================================
|
-- ===================================================================
|
||||||
vPackageList t_string_array := t_string_array(
|
vPackageList t_string_array := t_string_array(
|
||||||
'CT_MRDS.FILE_ARCHIVER'
|
'CT_MRDS.FILE_ARCHIVER',
|
||||||
|
'CT_MRDS.FILE_MANAGER'
|
||||||
);
|
);
|
||||||
-- ===================================================================
|
-- ===================================================================
|
||||||
|
|
||||||
|
|||||||
@@ -26,7 +26,7 @@ END;
|
|||||||
/
|
/
|
||||||
|
|
||||||
CREATE TABLE CT_MRDS.A_PARALLEL_EXPORT_CHUNKS (
|
CREATE TABLE CT_MRDS.A_PARALLEL_EXPORT_CHUNKS (
|
||||||
CHUNK_ID NUMBER PRIMARY KEY,
|
CHUNK_ID NUMBER NOT NULL,
|
||||||
TASK_NAME VARCHAR2(100) NOT NULL,
|
TASK_NAME VARCHAR2(100) NOT NULL,
|
||||||
YEAR_VALUE VARCHAR2(4) NOT NULL,
|
YEAR_VALUE VARCHAR2(4) NOT NULL,
|
||||||
MONTH_VALUE VARCHAR2(2) NOT NULL,
|
MONTH_VALUE VARCHAR2(2) NOT NULL,
|
||||||
@@ -47,14 +47,16 @@ CREATE TABLE CT_MRDS.A_PARALLEL_EXPORT_CHUNKS (
|
|||||||
STATUS VARCHAR2(30) DEFAULT 'PENDING' NOT NULL,
|
STATUS VARCHAR2(30) DEFAULT 'PENDING' NOT NULL,
|
||||||
ERROR_MESSAGE VARCHAR2(4000),
|
ERROR_MESSAGE VARCHAR2(4000),
|
||||||
EXPORT_TIMESTAMP TIMESTAMP,
|
EXPORT_TIMESTAMP TIMESTAMP,
|
||||||
CREATED_DATE TIMESTAMP DEFAULT SYSTIMESTAMP NOT NULL
|
CREATED_DATE TIMESTAMP DEFAULT SYSTIMESTAMP NOT NULL,
|
||||||
|
CONSTRAINT PK_PARALLEL_EXPORT_CHUNKS PRIMARY KEY (TASK_NAME, CHUNK_ID)
|
||||||
);
|
);
|
||||||
|
|
||||||
CREATE INDEX IX_PARALLEL_CHUNKS_TASK ON CT_MRDS.A_PARALLEL_EXPORT_CHUNKS(TASK_NAME);
|
-- Index for status-based queries (e.g., WHERE STATUS = 'FAILED' AND TASK_NAME = ?)
|
||||||
|
CREATE INDEX IX_PARALLEL_CHUNKS_STATUS_TASK ON CT_MRDS.A_PARALLEL_EXPORT_CHUNKS(STATUS, TASK_NAME);
|
||||||
|
|
||||||
COMMENT ON TABLE CT_MRDS.A_PARALLEL_EXPORT_CHUNKS IS 'Permanent table for parallel export chunk processing (DBMS_PARALLEL_EXECUTE) - permanent because GTT data not visible in parallel callback sessions';
|
COMMENT ON TABLE CT_MRDS.A_PARALLEL_EXPORT_CHUNKS IS 'Permanent table for parallel export chunk processing (DBMS_PARALLEL_EXECUTE) - permanent because GTT data not visible in parallel callback sessions. PK: (TASK_NAME, CHUNK_ID) ensures session isolation for concurrent exports.';
|
||||||
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.CHUNK_ID IS 'Unique chunk identifier (partition number)';
|
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.CHUNK_ID IS 'Chunk identifier within task (partition number) - unique per TASK_NAME, not globally';
|
||||||
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.TASK_NAME IS 'DBMS_PARALLEL_EXECUTE task name for cleanup';
|
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.TASK_NAME IS 'DBMS_PARALLEL_EXECUTE task name - session isolation key, part of composite PK with CHUNK_ID';
|
||||||
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.YEAR_VALUE IS 'Partition year (YYYY)';
|
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.YEAR_VALUE IS 'Partition year (YYYY)';
|
||||||
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.MONTH_VALUE IS 'Partition month (MM)';
|
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.MONTH_VALUE IS 'Partition month (MM)';
|
||||||
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.SCHEMA_NAME IS 'Schema owning the source table';
|
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.SCHEMA_NAME IS 'Schema owning the source table';
|
||||||
|
|||||||
@@ -18,34 +18,104 @@ AS
|
|||||||
----------------------------------------------------------------------------------------------------
|
----------------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Deletes export file from OCI bucket if it exists (used for cleanup before retry)
|
* Deletes ALL files matching specific file pattern before retry export
|
||||||
* Silently ignores if file doesn't exist (ORA-20404)
|
* Critical for preventing data duplication when DBMS_CLOUD.EXPORT_DATA fails mid-process
|
||||||
|
*
|
||||||
|
* Problem: Export fails after creating partial file(s), retry creates new _2, _3 suffixed files
|
||||||
|
* Solution: Delete ALL files matching the base filename pattern before retry
|
||||||
|
*
|
||||||
|
* Pattern matching strategy:
|
||||||
|
* - Parquet: folder/PARTITION_YEAR=2024/PARTITION_MONTH=11/*.parquet (folder-level safe - each chunk has own partition folder)
|
||||||
|
* - CSV: folder/TABLENAME_202411*.csv (file-level pattern - multiple chunks share same folder!)
|
||||||
|
*
|
||||||
|
* CRITICAL for parallel processing:
|
||||||
|
* - Parquet chunks are isolated by partition folder structure (safe to delete folder/*)
|
||||||
|
* - CSV chunks share flat folder structure - MUST use file-specific pattern (TABLENAME_YYYYMM*)
|
||||||
|
* to avoid deleting files from other parallel chunks in same folder
|
||||||
**/
|
**/
|
||||||
PROCEDURE DELETE_FAILED_EXPORT_FILE(
|
PROCEDURE DELETE_FAILED_EXPORT_FILE(
|
||||||
pFileUri IN VARCHAR2,
|
pFileUri IN VARCHAR2,
|
||||||
pCredentialName IN VARCHAR2,
|
pCredentialName IN VARCHAR2,
|
||||||
pParameters IN VARCHAR2
|
pParameters IN VARCHAR2
|
||||||
) IS
|
) IS
|
||||||
|
vBucketUri VARCHAR2(4000);
|
||||||
|
vFolderPath VARCHAR2(4000);
|
||||||
|
vFileName VARCHAR2(1000);
|
||||||
|
vFileNamePattern VARCHAR2(1000);
|
||||||
|
vSlashPos NUMBER;
|
||||||
|
vDotPos NUMBER;
|
||||||
|
vFilesDeleted NUMBER := 0;
|
||||||
BEGIN
|
BEGIN
|
||||||
BEGIN
|
-- Extract components from URI
|
||||||
ENV_MANAGER.LOG_PROCESS_EVENT('Attempting to delete potentially corrupted file: ' || pFileUri, 'DEBUG', pParameters);
|
-- Example Parquet: https://.../bucket/folder/PARTITION_YEAR=2024/PARTITION_MONTH=11/202411.parquet
|
||||||
|
-- Example CSV: https://.../bucket/folder/TABLENAME_202411.csv
|
||||||
|
|
||||||
DBMS_CLOUD.DELETE_OBJECT(
|
-- Find last slash before filename
|
||||||
credential_name => pCredentialName,
|
vSlashPos := INSTR(pFileUri, '/', -1);
|
||||||
object_uri => pFileUri
|
|
||||||
);
|
|
||||||
|
|
||||||
ENV_MANAGER.LOG_PROCESS_EVENT('Deleted existing file (cleanup before retry): ' || pFileUri, 'INFO', pParameters);
|
IF vSlashPos > 0 THEN
|
||||||
EXCEPTION
|
-- Extract filename from URI (after last slash)
|
||||||
WHEN OTHERS THEN
|
vFileName := SUBSTR(pFileUri, vSlashPos + 1);
|
||||||
-- Object not found is OK (file doesn't exist)
|
|
||||||
IF SQLCODE = -20404 THEN
|
-- Extract folder path (before last slash)
|
||||||
ENV_MANAGER.LOG_PROCESS_EVENT('File does not exist (OK): ' || pFileUri, 'DEBUG', pParameters);
|
vFolderPath := SUBSTR(pFileUri, 1, vSlashPos - 1);
|
||||||
ELSE
|
|
||||||
-- Log but don't fail - export will attempt anyway
|
-- Find bucket URI (protocol + namespace + bucket name)
|
||||||
ENV_MANAGER.LOG_PROCESS_EVENT('Warning: Could not delete file (will retry export anyway): ' || SQLERRM, 'WARNING', pParameters);
|
-- Bucket URI ends after /o/ in OCI Object Storage URLs
|
||||||
END IF;
|
vBucketUri := SUBSTR(pFileUri, 1, INSTR(pFileUri, '/o/') + 2);
|
||||||
END;
|
|
||||||
|
-- Extract relative folder path (after bucket)
|
||||||
|
vFolderPath := SUBSTR(vFolderPath, LENGTH(vBucketUri) + 1);
|
||||||
|
|
||||||
|
-- Create file pattern by removing extension
|
||||||
|
-- Oracle adds suffixes BEFORE extension: file.csv -> file_1_timestamp.csv
|
||||||
|
-- Pattern: file* matches file_1_timestamp.csv, file_2_timestamp.csv
|
||||||
|
vDotPos := INSTR(vFileName, '.', -1);
|
||||||
|
IF vDotPos > 0 THEN
|
||||||
|
vFileNamePattern := SUBSTR(vFileName, 1, vDotPos - 1) || '%';
|
||||||
|
ELSE
|
||||||
|
vFileNamePattern := vFileName || '%';
|
||||||
|
END IF;
|
||||||
|
|
||||||
|
ENV_MANAGER.LOG_PROCESS_EVENT('Cleanup before retry - Pattern: ' || vFolderPath || '/' || vFileNamePattern, 'DEBUG', pParameters);
|
||||||
|
|
||||||
|
-- List and delete ALL files matching pattern
|
||||||
|
-- CRITICAL: Uses file-specific pattern for CSV chunk isolation in shared folder
|
||||||
|
FOR rec IN (
|
||||||
|
SELECT object_name
|
||||||
|
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
||||||
|
credential_name => pCredentialName,
|
||||||
|
location_uri => vBucketUri
|
||||||
|
))
|
||||||
|
WHERE object_name LIKE vFolderPath || '/' || vFileNamePattern
|
||||||
|
) LOOP
|
||||||
|
BEGIN
|
||||||
|
DBMS_CLOUD.DELETE_OBJECT(
|
||||||
|
credential_name => pCredentialName,
|
||||||
|
object_uri => vBucketUri || rec.object_name
|
||||||
|
);
|
||||||
|
|
||||||
|
vFilesDeleted := vFilesDeleted + 1;
|
||||||
|
ENV_MANAGER.LOG_PROCESS_EVENT('Deleted partial file ' || vFilesDeleted || ': ' || rec.object_name, 'DEBUG', pParameters);
|
||||||
|
EXCEPTION
|
||||||
|
WHEN OTHERS THEN
|
||||||
|
-- Log but continue - don't fail entire cleanup
|
||||||
|
ENV_MANAGER.LOG_PROCESS_EVENT('Warning: Could not delete ' || rec.object_name || ': ' || SQLERRM, 'WARNING', pParameters);
|
||||||
|
END;
|
||||||
|
END LOOP;
|
||||||
|
|
||||||
|
IF vFilesDeleted > 0 THEN
|
||||||
|
ENV_MANAGER.LOG_PROCESS_EVENT('Cleanup completed: Deleted ' || vFilesDeleted || ' partial file(s) from previous failed export', 'INFO', pParameters);
|
||||||
|
ELSE
|
||||||
|
ENV_MANAGER.LOG_PROCESS_EVENT('No existing files to clean up (pattern match: ' || vFileNamePattern || ')', 'DEBUG', pParameters);
|
||||||
|
END IF;
|
||||||
|
ELSE
|
||||||
|
ENV_MANAGER.LOG_PROCESS_EVENT('Warning: Cannot parse file URI for cleanup: ' || pFileUri, 'WARNING', pParameters);
|
||||||
|
END IF;
|
||||||
|
EXCEPTION
|
||||||
|
WHEN OTHERS THEN
|
||||||
|
-- Don't fail export if cleanup fails - log and continue
|
||||||
|
ENV_MANAGER.LOG_PROCESS_EVENT('Warning: Cleanup failed (will retry export anyway): ' || SQLERRM, 'WARNING', pParameters);
|
||||||
END DELETE_FAILED_EXPORT_FILE;
|
END DELETE_FAILED_EXPORT_FILE;
|
||||||
|
|
||||||
----------------------------------------------------------------------------------------------------
|
----------------------------------------------------------------------------------------------------
|
||||||
@@ -415,6 +485,8 @@ AS
|
|||||||
AND L.LOAD_START >= TO_DATE(' || CHR(39) || TO_CHAR(pMinDate, 'YYYY-MM-DD HH24:MI:SS') || CHR(39) || ', ''YYYY-MM-DD HH24:MI:SS'')
|
AND L.LOAD_START >= TO_DATE(' || CHR(39) || TO_CHAR(pMinDate, 'YYYY-MM-DD HH24:MI:SS') || CHR(39) || ', ''YYYY-MM-DD HH24:MI:SS'')
|
||||||
AND L.LOAD_START < TO_DATE(' || CHR(39) || TO_CHAR(pMaxDate, 'YYYY-MM-DD HH24:MI:SS') || CHR(39) || ', ''YYYY-MM-DD HH24:MI:SS'')';
|
AND L.LOAD_START < TO_DATE(' || CHR(39) || TO_CHAR(pMaxDate, 'YYYY-MM-DD HH24:MI:SS') || CHR(39) || ', ''YYYY-MM-DD HH24:MI:SS'')';
|
||||||
|
|
||||||
|
ENV_MANAGER.LOG_PROCESS_EVENT('Processing Year/Month: ' || pYear || '/' || pMonth || ' (Format: '||pFormat||')', 'DEBUG', pParameters);
|
||||||
|
ENV_MANAGER.LOG_PROCESS_EVENT('Export query: ' || vQuery, 'DEBUG', pParameters);
|
||||||
-- Construct the URI based on format
|
-- Construct the URI based on format
|
||||||
IF pFormat = 'PARQUET' THEN
|
IF pFormat = 'PARQUET' THEN
|
||||||
-- Parquet: Use Hive-style partitioning
|
-- Parquet: Use Hive-style partitioning
|
||||||
@@ -425,6 +497,7 @@ AS
|
|||||||
'PARTITION_MONTH=' || sanitizeFilename(pMonth) || '/' ||
|
'PARTITION_MONTH=' || sanitizeFilename(pMonth) || '/' ||
|
||||||
sanitizeFilename(pYear) || sanitizeFilename(pMonth) || '.parquet';
|
sanitizeFilename(pYear) || sanitizeFilename(pMonth) || '.parquet';
|
||||||
|
|
||||||
|
|
||||||
ENV_MANAGER.LOG_PROCESS_EVENT('Parquet export URI: ' || vUri, 'DEBUG', pParameters);
|
ENV_MANAGER.LOG_PROCESS_EVENT('Parquet export URI: ' || vUri, 'DEBUG', pParameters);
|
||||||
|
|
||||||
-- Delete potentially corrupted file from previous failed attempt
|
-- Delete potentially corrupted file from previous failed attempt
|
||||||
@@ -445,6 +518,7 @@ AS
|
|||||||
sanitizeFilename(vFileName);
|
sanitizeFilename(vFileName);
|
||||||
|
|
||||||
ENV_MANAGER.LOG_PROCESS_EVENT('CSV export URI: ' || vUri, 'DEBUG', pParameters);
|
ENV_MANAGER.LOG_PROCESS_EVENT('CSV export URI: ' || vUri, 'DEBUG', pParameters);
|
||||||
|
ENV_MANAGER.LOG_PROCESS_EVENT('CSV maxfilesize: ' || pMaxFileSize || ' bytes (' || ROUND(pMaxFileSize/1048576, 2) || ' MB)', 'DEBUG', pParameters);
|
||||||
|
|
||||||
-- Delete potentially corrupted file from previous failed attempt
|
-- Delete potentially corrupted file from previous failed attempt
|
||||||
-- This prevents Oracle from creating _1 suffixed files on retry
|
-- This prevents Oracle from creating _1 suffixed files on retry
|
||||||
@@ -472,8 +546,7 @@ AS
|
|||||||
RAISE_APPLICATION_ERROR(-20001, 'Unsupported format: ' || pFormat || '. Use PARQUET or CSV.');
|
RAISE_APPLICATION_ERROR(-20001, 'Unsupported format: ' || pFormat || '. Use PARQUET or CSV.');
|
||||||
END IF;
|
END IF;
|
||||||
|
|
||||||
ENV_MANAGER.LOG_PROCESS_EVENT('Processing Year/Month: ' || pYear || '/' || pMonth || ' (Format: ' || pFormat || ')', 'DEBUG', pParameters);
|
ENV_MANAGER.LOG_PROCESS_EVENT('Export completed successfully for ' || pYear || '/' || pMonth, 'DEBUG', pParameters);
|
||||||
ENV_MANAGER.LOG_PROCESS_EVENT('Export query: ' || vQuery, 'DEBUG', pParameters);
|
|
||||||
END EXPORT_SINGLE_PARTITION;
|
END EXPORT_SINGLE_PARTITION;
|
||||||
|
|
||||||
----------------------------------------------------------------------------------------------------
|
----------------------------------------------------------------------------------------------------
|
||||||
@@ -485,7 +558,8 @@ AS
|
|||||||
**/
|
**/
|
||||||
PROCEDURE EXPORT_PARTITION_PARALLEL (
|
PROCEDURE EXPORT_PARTITION_PARALLEL (
|
||||||
pStartId IN NUMBER,
|
pStartId IN NUMBER,
|
||||||
pEndId IN NUMBER
|
pEndId IN NUMBER,
|
||||||
|
pTaskName IN VARCHAR2 DEFAULT NULL
|
||||||
) IS
|
) IS
|
||||||
vYear VARCHAR2(4);
|
vYear VARCHAR2(4);
|
||||||
vMonth VARCHAR2(2);
|
vMonth VARCHAR2(2);
|
||||||
@@ -502,9 +576,12 @@ AS
|
|||||||
vFileBaseName VARCHAR2(1000);
|
vFileBaseName VARCHAR2(1000);
|
||||||
vMaxFileSize NUMBER;
|
vMaxFileSize NUMBER;
|
||||||
vJobClass VARCHAR2(128);
|
vJobClass VARCHAR2(128);
|
||||||
|
vTaskName VARCHAR2(128);
|
||||||
vParameters VARCHAR2(4000);
|
vParameters VARCHAR2(4000);
|
||||||
BEGIN
|
BEGIN
|
||||||
-- Retrieve chunk context from global temporary table
|
-- Retrieve chunk context from A_PARALLEL_EXPORT_CHUNKS table
|
||||||
|
-- CRITICAL: Filter by CHUNK_ID and TASK_NAME for precise session isolation
|
||||||
|
-- pTaskName parameter passed from RUN_TASK ensures deterministic single-row retrieval
|
||||||
SELECT
|
SELECT
|
||||||
YEAR_VALUE,
|
YEAR_VALUE,
|
||||||
MONTH_VALUE,
|
MONTH_VALUE,
|
||||||
@@ -520,7 +597,8 @@ AS
|
|||||||
FORMAT_TYPE,
|
FORMAT_TYPE,
|
||||||
FILE_BASE_NAME,
|
FILE_BASE_NAME,
|
||||||
MAX_FILE_SIZE,
|
MAX_FILE_SIZE,
|
||||||
JOB_CLASS
|
JOB_CLASS,
|
||||||
|
TASK_NAME
|
||||||
INTO
|
INTO
|
||||||
vYear,
|
vYear,
|
||||||
vMonth,
|
vMonth,
|
||||||
@@ -536,18 +614,22 @@ AS
|
|||||||
vFormat,
|
vFormat,
|
||||||
vFileBaseName,
|
vFileBaseName,
|
||||||
vMaxFileSize,
|
vMaxFileSize,
|
||||||
vJobClass
|
vJobClass,
|
||||||
|
vTaskName
|
||||||
FROM CT_MRDS.A_PARALLEL_EXPORT_CHUNKS
|
FROM CT_MRDS.A_PARALLEL_EXPORT_CHUNKS
|
||||||
WHERE CHUNK_ID = pStartId;
|
WHERE CHUNK_ID = pStartId
|
||||||
|
AND TASK_NAME = pTaskName;
|
||||||
|
|
||||||
vParameters := 'Parallel task - Year: ' || vYear || ', Month: ' || vMonth || ', ChunkID: ' || pStartId;
|
vParameters := 'Parallel task - Year: ' || vYear || ', Month: ' || vMonth || ', ChunkID: ' || pStartId || ', TaskName: ' || vTaskName;
|
||||||
ENV_MANAGER.LOG_PROCESS_EVENT('Starting parallel export for partition ' || vYear || '/' || vMonth, 'DEBUG', vParameters);
|
ENV_MANAGER.LOG_PROCESS_EVENT('Starting parallel export for partition ' || vYear || '/' || vMonth, 'DEBUG', vParameters);
|
||||||
|
|
||||||
-- Mark chunk as PROCESSING
|
-- Mark chunk as PROCESSING
|
||||||
|
-- CRITICAL: Use both CHUNK_ID AND TASK_NAME for session isolation
|
||||||
UPDATE CT_MRDS.A_PARALLEL_EXPORT_CHUNKS
|
UPDATE CT_MRDS.A_PARALLEL_EXPORT_CHUNKS
|
||||||
SET STATUS = 'PROCESSING',
|
SET STATUS = 'PROCESSING',
|
||||||
ERROR_MESSAGE = NULL
|
ERROR_MESSAGE = NULL
|
||||||
WHERE CHUNK_ID = pStartId;
|
WHERE CHUNK_ID = pStartId
|
||||||
|
AND TASK_NAME = vTaskName;
|
||||||
COMMIT;
|
COMMIT;
|
||||||
|
|
||||||
-- Call the worker procedure
|
-- Call the worker procedure
|
||||||
@@ -570,26 +652,30 @@ AS
|
|||||||
);
|
);
|
||||||
|
|
||||||
-- Mark chunk as COMPLETED
|
-- Mark chunk as COMPLETED
|
||||||
|
-- CRITICAL: Use both CHUNK_ID AND TASK_NAME for session isolation
|
||||||
UPDATE CT_MRDS.A_PARALLEL_EXPORT_CHUNKS
|
UPDATE CT_MRDS.A_PARALLEL_EXPORT_CHUNKS
|
||||||
SET STATUS = 'COMPLETED',
|
SET STATUS = 'COMPLETED',
|
||||||
EXPORT_TIMESTAMP = SYSTIMESTAMP,
|
EXPORT_TIMESTAMP = SYSTIMESTAMP,
|
||||||
ERROR_MESSAGE = NULL
|
ERROR_MESSAGE = NULL
|
||||||
WHERE CHUNK_ID = pStartId;
|
WHERE CHUNK_ID = pStartId
|
||||||
|
AND TASK_NAME = vTaskName;
|
||||||
COMMIT;
|
COMMIT;
|
||||||
|
|
||||||
ENV_MANAGER.LOG_PROCESS_EVENT('Completed parallel export for partition ' || vYear || '/' || vMonth, 'DEBUG', vParameters);
|
ENV_MANAGER.LOG_PROCESS_EVENT('Completed parallel export for partition ' || vYear || '/' || vMonth, 'DEBUG', vParameters);
|
||||||
EXCEPTION
|
EXCEPTION
|
||||||
WHEN OTHERS THEN
|
WHEN OTHERS THEN
|
||||||
-- Capture error details in variable (SQLERRM cannot be used directly in SQL)
|
-- Capture error details in variable (SQLERRM cannot be used directly in SQL)
|
||||||
vgMsgTmp := 'Parallel task error for partition ' || vYear || '/' || vMonth || ' (ChunkID: ' || pStartId || '): ' || SQLERRM || cgBL || DBMS_UTILITY.FORMAT_ERROR_BACKTRACE;
|
vgMsgTmp := 'Parallel task error for partition ' || vYear || '/' || vMonth || ' (ChunkID: ' || pStartId || ', TaskName: ' || vTaskName || '): ' || SQLERRM || cgBL || DBMS_UTILITY.FORMAT_ERROR_BACKTRACE;
|
||||||
ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
|
ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
|
||||||
|
|
||||||
-- Mark chunk as FAILED with error message
|
-- Mark chunk as FAILED with error message
|
||||||
|
-- CRITICAL: Use both CHUNK_ID AND TASK_NAME for session isolation
|
||||||
-- Use vgMsgTmp variable instead of SQLERRM directly (Oracle limitation in SQL context)
|
-- Use vgMsgTmp variable instead of SQLERRM directly (Oracle limitation in SQL context)
|
||||||
UPDATE CT_MRDS.A_PARALLEL_EXPORT_CHUNKS
|
UPDATE CT_MRDS.A_PARALLEL_EXPORT_CHUNKS
|
||||||
SET STATUS = 'FAILED',
|
SET STATUS = 'FAILED',
|
||||||
ERROR_MESSAGE = SUBSTR(vgMsgTmp, 1, 4000)
|
ERROR_MESSAGE = SUBSTR(vgMsgTmp, 1, 4000)
|
||||||
WHERE CHUNK_ID = pStartId;
|
WHERE CHUNK_ID = pStartId
|
||||||
|
AND TASK_NAME = vTaskName;
|
||||||
COMMIT;
|
COMMIT;
|
||||||
|
|
||||||
RAISE;
|
RAISE;
|
||||||
@@ -1056,8 +1142,8 @@ AS
|
|||||||
-- Populate chunks table (insert new chunks, preserve FAILED chunks for retry)
|
-- Populate chunks table (insert new chunks, preserve FAILED chunks for retry)
|
||||||
FOR i IN 1 .. vPartitions.COUNT LOOP
|
FOR i IN 1 .. vPartitions.COUNT LOOP
|
||||||
MERGE INTO CT_MRDS.A_PARALLEL_EXPORT_CHUNKS t
|
MERGE INTO CT_MRDS.A_PARALLEL_EXPORT_CHUNKS t
|
||||||
USING (SELECT i AS chunk_id, vPartitions(i).year AS yr, vPartitions(i).month AS mn FROM DUAL) s
|
USING (SELECT i AS chunk_id, vTaskName AS task_name, vPartitions(i).year AS yr, vPartitions(i).month AS mn FROM DUAL) s
|
||||||
ON (t.CHUNK_ID = s.chunk_id)
|
ON (t.CHUNK_ID = s.chunk_id AND t.TASK_NAME = s.task_name)
|
||||||
WHEN NOT MATCHED THEN
|
WHEN NOT MATCHED THEN
|
||||||
INSERT (CHUNK_ID, TASK_NAME, YEAR_VALUE, MONTH_VALUE, SCHEMA_NAME, TABLE_NAME, KEY_COLUMN_NAME,
|
INSERT (CHUNK_ID, TASK_NAME, YEAR_VALUE, MONTH_VALUE, SCHEMA_NAME, TABLE_NAME, KEY_COLUMN_NAME,
|
||||||
BUCKET_URI, FOLDER_NAME, PROCESSED_COLUMNS, MIN_DATE, MAX_DATE,
|
BUCKET_URI, FOLDER_NAME, PROCESSED_COLUMNS, MIN_DATE, MAX_DATE,
|
||||||
@@ -1066,33 +1152,34 @@ AS
|
|||||||
vBucketUri, pFolderName, vProcessedColumnList, pMinDate, pMaxDate,
|
vBucketUri, pFolderName, vProcessedColumnList, pMinDate, pMaxDate,
|
||||||
pCredentialName, 'PARQUET', NULL, pTemplateTableName, 104857600, pJobClass, 'PENDING')
|
pCredentialName, 'PARQUET', NULL, pTemplateTableName, 104857600, pJobClass, 'PENDING')
|
||||||
WHEN MATCHED THEN
|
WHEN MATCHED THEN
|
||||||
UPDATE SET TASK_NAME = vTaskName,
|
-- Match found: chunk exists for SAME task (composite PK: TASK_NAME, CHUNK_ID)
|
||||||
STATUS = CASE WHEN t.STATUS = 'FAILED' THEN 'PENDING' ELSE t.STATUS END,
|
-- This handles retry scenario: reset FAILED chunks to PENDING for re-processing
|
||||||
|
UPDATE SET STATUS = CASE WHEN t.STATUS = 'FAILED' THEN 'PENDING' ELSE t.STATUS END,
|
||||||
ERROR_MESSAGE = CASE WHEN t.STATUS = 'FAILED' THEN NULL ELSE t.ERROR_MESSAGE END;
|
ERROR_MESSAGE = CASE WHEN t.STATUS = 'FAILED' THEN NULL ELSE t.ERROR_MESSAGE END;
|
||||||
END LOOP;
|
END LOOP;
|
||||||
COMMIT;
|
COMMIT;
|
||||||
|
|
||||||
-- Log chunk statistics
|
-- Log chunk statistics (session-safe: only count chunks for THIS task)
|
||||||
DECLARE
|
DECLARE
|
||||||
vPendingCount NUMBER;
|
vPendingCount NUMBER;
|
||||||
vFailedCount NUMBER;
|
vFailedCount NUMBER;
|
||||||
BEGIN
|
BEGIN
|
||||||
SELECT COUNT(*) INTO vPendingCount FROM CT_MRDS.A_PARALLEL_EXPORT_CHUNKS WHERE STATUS = 'PENDING';
|
SELECT COUNT(*) INTO vPendingCount FROM CT_MRDS.A_PARALLEL_EXPORT_CHUNKS WHERE STATUS = 'PENDING' AND TASK_NAME = vTaskName;
|
||||||
SELECT COUNT(*) INTO vFailedCount FROM CT_MRDS.A_PARALLEL_EXPORT_CHUNKS WHERE STATUS = 'FAILED';
|
SELECT COUNT(*) INTO vFailedCount FROM CT_MRDS.A_PARALLEL_EXPORT_CHUNKS WHERE STATUS = 'FAILED' AND TASK_NAME = vTaskName;
|
||||||
|
|
||||||
ENV_MANAGER.LOG_PROCESS_EVENT('Chunk statistics: PENDING=' || vPendingCount || ', FAILED (retry)=' || vFailedCount, 'INFO', vParameters);
|
ENV_MANAGER.LOG_PROCESS_EVENT('Chunk statistics for task ' || vTaskName || ': PENDING=' || vPendingCount || ', FAILED (retry)=' || vFailedCount, 'INFO', vParameters);
|
||||||
END;
|
END;
|
||||||
|
|
||||||
-- Create parallel task
|
-- Create parallel task
|
||||||
DBMS_PARALLEL_EXECUTE.CREATE_TASK(task_name => vTaskName);
|
DBMS_PARALLEL_EXECUTE.CREATE_TASK(task_name => vTaskName);
|
||||||
|
|
||||||
-- Define chunks by number range (1 to partition count)
|
-- Define chunks using SQL query to ensure TASK_NAME isolation
|
||||||
DBMS_PARALLEL_EXECUTE.CREATE_CHUNKS_BY_NUMBER_COL(
|
-- CRITICAL: Filter by TASK_NAME to avoid selecting chunks from other concurrent sessions
|
||||||
|
-- CRITICAL: Use START_ID and END_ID aliases to avoid ORA-00960 ambiguous column naming
|
||||||
|
DBMS_PARALLEL_EXECUTE.CREATE_CHUNKS_BY_SQL(
|
||||||
task_name => vTaskName,
|
task_name => vTaskName,
|
||||||
table_owner => 'CT_MRDS',
|
sql_stmt => 'SELECT CHUNK_ID AS START_ID, CHUNK_ID AS END_ID FROM CT_MRDS.A_PARALLEL_EXPORT_CHUNKS WHERE TASK_NAME = ''' || vTaskName || ''' ORDER BY CHUNK_ID',
|
||||||
table_name => 'A_PARALLEL_EXPORT_CHUNKS',
|
by_rowid => FALSE
|
||||||
table_column => 'CHUNK_ID',
|
|
||||||
chunk_size => 1 -- Each partition is one chunk
|
|
||||||
);
|
);
|
||||||
|
|
||||||
-- Execute task in parallel
|
-- Execute task in parallel
|
||||||
@@ -1101,7 +1188,7 @@ AS
|
|||||||
IF pJobClass IS NOT NULL THEN
|
IF pJobClass IS NOT NULL THEN
|
||||||
DBMS_PARALLEL_EXECUTE.RUN_TASK(
|
DBMS_PARALLEL_EXECUTE.RUN_TASK(
|
||||||
task_name => vTaskName,
|
task_name => vTaskName,
|
||||||
sql_stmt => 'BEGIN CT_MRDS.DATA_EXPORTER.EXPORT_PARTITION_PARALLEL(:start_id, :end_id); END;',
|
sql_stmt => 'BEGIN CT_MRDS.DATA_EXPORTER.EXPORT_PARTITION_PARALLEL(:start_id, :end_id, ''' || vTaskName || '''); END;',
|
||||||
language_flag => DBMS_SQL.NATIVE,
|
language_flag => DBMS_SQL.NATIVE,
|
||||||
parallel_level => pParallelDegree,
|
parallel_level => pParallelDegree,
|
||||||
job_class => pJobClass
|
job_class => pJobClass
|
||||||
@@ -1109,7 +1196,7 @@ AS
|
|||||||
ELSE
|
ELSE
|
||||||
DBMS_PARALLEL_EXECUTE.RUN_TASK(
|
DBMS_PARALLEL_EXECUTE.RUN_TASK(
|
||||||
task_name => vTaskName,
|
task_name => vTaskName,
|
||||||
sql_stmt => 'BEGIN CT_MRDS.DATA_EXPORTER.EXPORT_PARTITION_PARALLEL(:start_id, :end_id); END;',
|
sql_stmt => 'BEGIN CT_MRDS.DATA_EXPORTER.EXPORT_PARTITION_PARALLEL(:start_id, :end_id, ''' || vTaskName || '''); END;',
|
||||||
language_flag => DBMS_SQL.NATIVE,
|
language_flag => DBMS_SQL.NATIVE,
|
||||||
parallel_level => pParallelDegree
|
parallel_level => pParallelDegree
|
||||||
);
|
);
|
||||||
@@ -1360,8 +1447,8 @@ AS
|
|||||||
-- Populate chunks table (insert new chunks, preserve FAILED chunks for retry)
|
-- Populate chunks table (insert new chunks, preserve FAILED chunks for retry)
|
||||||
FOR i IN 1 .. vPartitions.COUNT LOOP
|
FOR i IN 1 .. vPartitions.COUNT LOOP
|
||||||
MERGE INTO CT_MRDS.A_PARALLEL_EXPORT_CHUNKS t
|
MERGE INTO CT_MRDS.A_PARALLEL_EXPORT_CHUNKS t
|
||||||
USING (SELECT i AS chunk_id, vPartitions(i).year AS yr, vPartitions(i).month AS mn FROM DUAL) s
|
USING (SELECT i AS chunk_id, vTaskName AS task_name, vPartitions(i).year AS yr, vPartitions(i).month AS mn FROM DUAL) s
|
||||||
ON (t.CHUNK_ID = s.chunk_id)
|
ON (t.CHUNK_ID = s.chunk_id AND t.TASK_NAME = s.task_name)
|
||||||
WHEN NOT MATCHED THEN
|
WHEN NOT MATCHED THEN
|
||||||
INSERT (CHUNK_ID, TASK_NAME, YEAR_VALUE, MONTH_VALUE, SCHEMA_NAME, TABLE_NAME, KEY_COLUMN_NAME,
|
INSERT (CHUNK_ID, TASK_NAME, YEAR_VALUE, MONTH_VALUE, SCHEMA_NAME, TABLE_NAME, KEY_COLUMN_NAME,
|
||||||
BUCKET_URI, FOLDER_NAME, PROCESSED_COLUMNS, MIN_DATE, MAX_DATE,
|
BUCKET_URI, FOLDER_NAME, PROCESSED_COLUMNS, MIN_DATE, MAX_DATE,
|
||||||
@@ -1370,33 +1457,34 @@ AS
|
|||||||
vBucketUri, pFolderName, vProcessedColumnList, pMinDate, pMaxDate,
|
vBucketUri, pFolderName, vProcessedColumnList, pMinDate, pMaxDate,
|
||||||
pCredentialName, 'CSV', vFileBaseName, pTemplateTableName, pMaxFileSize, pJobClass, 'PENDING')
|
pCredentialName, 'CSV', vFileBaseName, pTemplateTableName, pMaxFileSize, pJobClass, 'PENDING')
|
||||||
WHEN MATCHED THEN
|
WHEN MATCHED THEN
|
||||||
UPDATE SET TASK_NAME = vTaskName,
|
-- Match found: chunk exists for SAME task (composite PK: TASK_NAME, CHUNK_ID)
|
||||||
STATUS = CASE WHEN t.STATUS = 'FAILED' THEN 'PENDING' ELSE t.STATUS END,
|
-- This handles retry scenario: reset FAILED chunks to PENDING for re-processing
|
||||||
|
UPDATE SET STATUS = CASE WHEN t.STATUS = 'FAILED' THEN 'PENDING' ELSE t.STATUS END,
|
||||||
ERROR_MESSAGE = CASE WHEN t.STATUS = 'FAILED' THEN NULL ELSE t.ERROR_MESSAGE END;
|
ERROR_MESSAGE = CASE WHEN t.STATUS = 'FAILED' THEN NULL ELSE t.ERROR_MESSAGE END;
|
||||||
END LOOP;
|
END LOOP;
|
||||||
COMMIT;
|
COMMIT;
|
||||||
|
|
||||||
-- Log chunk statistics
|
-- Log chunk statistics (session-safe: only count chunks for THIS task)
|
||||||
DECLARE
|
DECLARE
|
||||||
vPendingCount NUMBER;
|
vPendingCount NUMBER;
|
||||||
vFailedCount NUMBER;
|
vFailedCount NUMBER;
|
||||||
BEGIN
|
BEGIN
|
||||||
SELECT COUNT(*) INTO vPendingCount FROM CT_MRDS.A_PARALLEL_EXPORT_CHUNKS WHERE STATUS = 'PENDING';
|
SELECT COUNT(*) INTO vPendingCount FROM CT_MRDS.A_PARALLEL_EXPORT_CHUNKS WHERE STATUS = 'PENDING' AND TASK_NAME = vTaskName;
|
||||||
SELECT COUNT(*) INTO vFailedCount FROM CT_MRDS.A_PARALLEL_EXPORT_CHUNKS WHERE STATUS = 'FAILED';
|
SELECT COUNT(*) INTO vFailedCount FROM CT_MRDS.A_PARALLEL_EXPORT_CHUNKS WHERE STATUS = 'FAILED' AND TASK_NAME = vTaskName;
|
||||||
|
|
||||||
ENV_MANAGER.LOG_PROCESS_EVENT('Chunk statistics: PENDING=' || vPendingCount || ', FAILED (retry)=' || vFailedCount, 'INFO', vParameters);
|
ENV_MANAGER.LOG_PROCESS_EVENT('Chunk statistics for task ' || vTaskName || ': PENDING=' || vPendingCount || ', FAILED (retry)=' || vFailedCount, 'INFO', vParameters);
|
||||||
END;
|
END;
|
||||||
|
|
||||||
-- Create parallel task
|
-- Create parallel task
|
||||||
DBMS_PARALLEL_EXECUTE.CREATE_TASK(task_name => vTaskName);
|
DBMS_PARALLEL_EXECUTE.CREATE_TASK(task_name => vTaskName);
|
||||||
|
|
||||||
-- Define chunks by number range (1 to partition count)
|
-- Define chunks using SQL query to ensure TASK_NAME isolation
|
||||||
DBMS_PARALLEL_EXECUTE.CREATE_CHUNKS_BY_NUMBER_COL(
|
-- CRITICAL: Filter by TASK_NAME to avoid selecting chunks from other concurrent sessions
|
||||||
|
-- CRITICAL: Use START_ID and END_ID aliases to avoid ORA-00960 ambiguous column naming
|
||||||
|
DBMS_PARALLEL_EXECUTE.CREATE_CHUNKS_BY_SQL(
|
||||||
task_name => vTaskName,
|
task_name => vTaskName,
|
||||||
table_owner => 'CT_MRDS',
|
sql_stmt => 'SELECT CHUNK_ID AS START_ID, CHUNK_ID AS END_ID FROM CT_MRDS.A_PARALLEL_EXPORT_CHUNKS WHERE TASK_NAME = ''' || vTaskName || ''' ORDER BY CHUNK_ID',
|
||||||
table_name => 'A_PARALLEL_EXPORT_CHUNKS',
|
by_rowid => FALSE
|
||||||
table_column => 'CHUNK_ID',
|
|
||||||
chunk_size => 1 -- Each partition is one chunk
|
|
||||||
);
|
);
|
||||||
|
|
||||||
-- Execute task in parallel
|
-- Execute task in parallel
|
||||||
@@ -1405,7 +1493,7 @@ AS
|
|||||||
IF pJobClass IS NOT NULL THEN
|
IF pJobClass IS NOT NULL THEN
|
||||||
DBMS_PARALLEL_EXECUTE.RUN_TASK(
|
DBMS_PARALLEL_EXECUTE.RUN_TASK(
|
||||||
task_name => vTaskName,
|
task_name => vTaskName,
|
||||||
sql_stmt => 'BEGIN CT_MRDS.DATA_EXPORTER.EXPORT_PARTITION_PARALLEL(:start_id, :end_id); END;',
|
sql_stmt => 'BEGIN CT_MRDS.DATA_EXPORTER.EXPORT_PARTITION_PARALLEL(:start_id, :end_id, ''' || vTaskName || '''); END;',
|
||||||
language_flag => DBMS_SQL.NATIVE,
|
language_flag => DBMS_SQL.NATIVE,
|
||||||
parallel_level => pParallelDegree,
|
parallel_level => pParallelDegree,
|
||||||
job_class => pJobClass
|
job_class => pJobClass
|
||||||
@@ -1413,7 +1501,7 @@ AS
|
|||||||
ELSE
|
ELSE
|
||||||
DBMS_PARALLEL_EXECUTE.RUN_TASK(
|
DBMS_PARALLEL_EXECUTE.RUN_TASK(
|
||||||
task_name => vTaskName,
|
task_name => vTaskName,
|
||||||
sql_stmt => 'BEGIN CT_MRDS.DATA_EXPORTER.EXPORT_PARTITION_PARALLEL(:start_id, :end_id); END;',
|
sql_stmt => 'BEGIN CT_MRDS.DATA_EXPORTER.EXPORT_PARTITION_PARALLEL(:start_id, :end_id, ''' || vTaskName || '''); END;',
|
||||||
language_flag => DBMS_SQL.NATIVE,
|
language_flag => DBMS_SQL.NATIVE,
|
||||||
parallel_level => pParallelDegree
|
parallel_level => pParallelDegree
|
||||||
);
|
);
|
||||||
|
|||||||
@@ -9,17 +9,17 @@ AS
|
|||||||
**/
|
**/
|
||||||
|
|
||||||
-- Package Version Information
|
-- Package Version Information
|
||||||
PACKAGE_VERSION CONSTANT VARCHAR2(10) := '2.11.0';
|
PACKAGE_VERSION CONSTANT VARCHAR2(10) := '2.14.0';
|
||||||
PACKAGE_BUILD_DATE CONSTANT VARCHAR2(20) := '2026-02-18 10:00:00';
|
PACKAGE_BUILD_DATE CONSTANT VARCHAR2(20) := '2026-02-25 09:00:00';
|
||||||
PACKAGE_AUTHOR CONSTANT VARCHAR2(100) := 'Grzegorz Michalski';
|
PACKAGE_AUTHOR CONSTANT VARCHAR2(100) := 'Grzegorz Michalski';
|
||||||
|
|
||||||
-- Version History (last 3-5 changes)
|
-- Version History (last 3-5 changes)
|
||||||
VERSION_HISTORY CONSTANT VARCHAR2(4000) :=
|
VERSION_HISTORY CONSTANT VARCHAR2(4000) :=
|
||||||
'v2.11.0 (2026-02-18): Added pJobClass parameter to EXPORT_TABLE_DATA_BY_DATE and EXPORT_TABLE_DATA_TO_CSV_BY_DATE for Oracle Scheduler job class support (resource/priority management).' || CHR(10) ||
|
'v2.14.0 (2026-02-25): OPTIMIZATION - Added pTaskName parameter to EXPORT_PARTITION_PARALLEL for deterministic filtering. Replaced FETCH FIRST 1 ROW ONLY safeguard with precise WHERE CHUNK_ID AND TASK_NAME filter. Eliminates ORDER BY overhead and provides cleaner session isolation.' || CHR(10) ||
|
||||||
'v2.10.1 (2026-02-17): CRITICAL FIX - Remove redundant COMPLETED chunks deletion before parallel export that caused ORA-01403 errors (phantom chunks created by CREATE_CHUNKS_BY_NUMBER_COL).' || CHR(10) ||
|
'v2.13.1 (2026-02-25): CRITICAL FIX - Added START_ID and END_ID aliasses in CREATE_CHUNKS_BY_SQL to avoid ORA-00960 ambiguous column naming error.' || CHR(10) ||
|
||||||
'v2.10.0 (2026-02-13): CRITICAL FIX - Register ALL files created by DBMS_CLOUD.EXPORT_DATA (multi-file support due to Oracle parallel processing on large instances). Prevents orphaned files in rollback.' || CHR(10) ||
|
'v2.13.0 (2026-02-25): CRITICAL SESSION ISOLATION FIX - Changed CREATE_CHUNKS_BY_NUMBER_COL to CREATE_CHUNKS_BY_SQL with TASK_NAME filter (fixes ORA-01422 in concurrent sessions). Added ORDER BY CREATED_DATE DESC FETCH FIRST 1 ROW safeguard to EXPORT_PARTITION_PARALLEL SELECT. Composite PK (TASK_NAME, CHUNK_ID) now fully functional.' || CHR(10) ||
|
||||||
'v2.9.0 (2026-02-13): Added pProcessName parameter to EXPORT_TABLE_DATA and EXPORT_TABLE_DATA_TO_CSV_BY_DATE procedures for process tracking in A_SOURCE_FILE_RECEIVED table.' || CHR(10) ||
|
'v2.12.0 (2026-02-24): CRITICAL FIX - Rewritten DELETE_FAILED_EXPORT_FILE to use file-specific pattern matching (prevents deleting parallel CSV chunks in shared folder). Added vQuery logging before DBMS_CLOUD calls. Added CSV maxfilesize logging.' || CHR(10) ||
|
||||||
'v2.8.1 (2026-02-12): FIX query in EXPORT_TABLE_DATA - removed A_LOAD_HISTORY join to ensure single file output (simple SELECT).' || CHR(10);
|
'v2.11.0 (2026-02-18): Added pJobClass parameter to EXPORT_TABLE_DATA_BY_DATE and EXPORT_TABLE_DATA_TO_CSV_BY_DATE for Oracle Scheduler job class support (resource/priority management).' || CHR(10);
|
||||||
|
|
||||||
cgBL CONSTANT VARCHAR2(2) := CHR(13)||CHR(10);
|
cgBL CONSTANT VARCHAR2(2) := CHR(13)||CHR(10);
|
||||||
vgMsgTmp VARCHAR2(32000);
|
vgMsgTmp VARCHAR2(32000);
|
||||||
@@ -54,10 +54,12 @@ AS
|
|||||||
* but should NOT be called directly by external code.
|
* but should NOT be called directly by external code.
|
||||||
* @param pStartId - Chunk start ID (CHUNK_ID from A_PARALLEL_EXPORT_CHUNKS table)
|
* @param pStartId - Chunk start ID (CHUNK_ID from A_PARALLEL_EXPORT_CHUNKS table)
|
||||||
* @param pEndId - Chunk end ID (same as pStartId for single-row chunks)
|
* @param pEndId - Chunk end ID (same as pStartId for single-row chunks)
|
||||||
|
* @param pTaskName - Task name for session isolation (optional, DEFAULT NULL for backward compatibility)
|
||||||
**/
|
**/
|
||||||
PROCEDURE EXPORT_PARTITION_PARALLEL (
|
PROCEDURE EXPORT_PARTITION_PARALLEL (
|
||||||
pStartId IN NUMBER,
|
pStartId IN NUMBER,
|
||||||
pEndId IN NUMBER
|
pEndId IN NUMBER,
|
||||||
|
pTaskName IN VARCHAR2 DEFAULT NULL
|
||||||
);
|
);
|
||||||
|
|
||||||
---------------------------------------------------------------------------------------------------------------------------
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
|||||||
@@ -1,125 +1,31 @@
|
|||||||
--=============================================================================================================================
|
--=============================================================================================================================
|
||||||
-- MARS-835: Export Group 1 - Split DATA + HIST (DEBT, DEBT_DAILY)
|
-- MARS-835: Export Group 1 - HIST Only (DEBT, DEBT_DAILY)
|
||||||
--=============================================================================================================================
|
--=============================================================================================================================
|
||||||
-- Purpose: Export last 6 months to DATA bucket (CSV), older data to HIST bucket (Parquet)
|
-- Purpose: Export ALL data to HIST bucket (Parquet with Hive-style partitioning)
|
||||||
-- Applies column mapping: A_ETL_LOAD_SET_FK to A_WORKFLOW_HISTORY_KEY
|
-- Applies column mapping: A_ETL_LOAD_SET_FK to A_WORKFLOW_HISTORY_KEY
|
||||||
-- Excludes legacy columns not required in new structure
|
-- Excludes legacy columns not required in new structure
|
||||||
-- USES: DATA_EXPORTER v2.4.0 with pTemplateTableName for column order and date formats
|
-- USES: DATA_EXPORTER v2.12.0 with pTemplateTableName for column order and date formats
|
||||||
-- Author: Grzegorz Michalski
|
-- Author: Grzegorz Michalski
|
||||||
-- Date: 2025-12-17
|
-- Date: 2025-12-17
|
||||||
-- Updated: 2026-01-11 (Updated to DATA_EXPORTER v2.4.0 with pTemplateTableName)
|
-- Updated: 2026-02-24 (Changed to HIST-only export, no DATA bucket split)
|
||||||
-- Related: MARS-835 - CSDB Data Export
|
-- Related: MARS-835 - CSDB Data Export
|
||||||
--=============================================================================================================================
|
--=============================================================================================================================
|
||||||
|
|
||||||
SET SERVEROUTPUT ON SIZE UNLIMITED
|
SET SERVEROUTPUT ON SIZE UNLIMITED
|
||||||
SET TIMING ON
|
SET TIMING ON
|
||||||
|
|
||||||
DEFINE cutoff_date = "TRUNC(ADD_MONTHS(SYSDATE, -6), 'MM')"
|
|
||||||
|
|
||||||
PROMPT ========================================================================
|
PROMPT ========================================================================
|
||||||
PROMPT Exporting CSDB.DEBT - Split DATA + HIST
|
PROMPT Exporting CSDB.DEBT - HIST Only
|
||||||
PROMPT ========================================================================
|
PROMPT ========================================================================
|
||||||
PROMPT Last 6 months to DATA bucket (CSV format)
|
PROMPT ALL data to HIST bucket (Parquet with Hive-style partitioning)
|
||||||
PROMPT Older data to HIST bucket (Parquet with partitioning)
|
|
||||||
PROMPT Column mapping: A_ETL_LOAD_SET_FK to A_WORKFLOW_HISTORY_KEY
|
PROMPT Column mapping: A_ETL_LOAD_SET_FK to A_WORKFLOW_HISTORY_KEY
|
||||||
PROMPT Excluded columns: IDIRDEPOSITORY, VA_BONDDURATION
|
PROMPT Excluded columns: IDIRDEPOSITORY, VA_BONDDURATION
|
||||||
PROMPT ========================================================================
|
PROMPT ========================================================================
|
||||||
|
|
||||||
-- PRE-EXPORT CHECK: List existing files and count records
|
-- Export ALL data to HIST bucket (Parquet)
|
||||||
DECLARE
|
-- NEW v2.12.0: Per-column date format handling with template table, full data range
|
||||||
vFileCount NUMBER := 0;
|
|
||||||
vRecordCount NUMBER := 0;
|
|
||||||
vLocationUri VARCHAR2(1000);
|
|
||||||
BEGIN
|
BEGIN
|
||||||
-- Get bucket URI for DATA bucket
|
DBMS_OUTPUT.PUT_LINE('Exporting LEGACY_DEBT data to HIST bucket (ALL data)...');
|
||||||
vLocationUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('DATA') || 'ODS/CSDB/CSDB_DEBT/';
|
|
||||||
|
|
||||||
-- Count existing files
|
|
||||||
SELECT COUNT(*)
|
|
||||||
INTO vFileCount
|
|
||||||
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
|
||||||
credential_name => 'OCI$RESOURCE_PRINCIPAL',
|
|
||||||
location_uri => vLocationUri
|
|
||||||
))
|
|
||||||
WHERE object_name NOT LIKE '%/'; -- Exclude directories
|
|
||||||
|
|
||||||
IF vFileCount > 0 THEN
|
|
||||||
DBMS_OUTPUT.PUT_LINE('===============================================================================');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('PRE-EXPORT CHECK: Files already exist in DATA bucket');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('===============================================================================');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('Location: ' || vLocationUri);
|
|
||||||
DBMS_OUTPUT.PUT_LINE('Files found: ' || vFileCount);
|
|
||||||
DBMS_OUTPUT.PUT_LINE('');
|
|
||||||
|
|
||||||
-- List existing files
|
|
||||||
DBMS_OUTPUT.PUT_LINE('Existing files:');
|
|
||||||
FOR rec IN (
|
|
||||||
SELECT object_name, bytes, TO_CHAR(last_modified, 'YYYY-MM-DD HH24:MI:SS') AS modified
|
|
||||||
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
|
||||||
credential_name => 'OCI$RESOURCE_PRINCIPAL',
|
|
||||||
location_uri => vLocationUri
|
|
||||||
))
|
|
||||||
WHERE object_name NOT LIKE '%/'
|
|
||||||
ORDER BY object_name
|
|
||||||
) LOOP
|
|
||||||
DBMS_OUTPUT.PUT_LINE(' - ' || rec.object_name || ' (' || rec.bytes || ' bytes, ' || rec.modified || ')');
|
|
||||||
END LOOP;
|
|
||||||
|
|
||||||
-- Count records in external table
|
|
||||||
BEGIN
|
|
||||||
EXECUTE IMMEDIATE 'SELECT COUNT(*) FROM ODS.CSDB_DEBT_ODS' INTO vRecordCount;
|
|
||||||
DBMS_OUTPUT.PUT_LINE('');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('-------------------------------------------------------------------------------');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('>>>');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('>>> Records currently readable via external table: ' || vRecordCount);
|
|
||||||
DBMS_OUTPUT.PUT_LINE('>>>');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('-------------------------------------------------------------------------------');
|
|
||||||
EXCEPTION
|
|
||||||
WHEN OTHERS THEN
|
|
||||||
DBMS_OUTPUT.PUT_LINE('');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('WARNING: Cannot count records in external table');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('Error: ' || SQLERRM);
|
|
||||||
END;
|
|
||||||
|
|
||||||
DBMS_OUTPUT.PUT_LINE('===============================================================================');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('');
|
|
||||||
ELSE
|
|
||||||
DBMS_OUTPUT.PUT_LINE('PRE-EXPORT CHECK: No existing files found in DATA bucket - bucket is clean');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('');
|
|
||||||
END IF;
|
|
||||||
END;
|
|
||||||
/
|
|
||||||
|
|
||||||
-- Export recent data to DATA bucket (CSV)
|
|
||||||
-- NEW v2.4.0: Per-column date format handling with template table for column order
|
|
||||||
BEGIN
|
|
||||||
DBMS_OUTPUT.PUT_LINE('Exporting LEGACY_DEBT data to DATA bucket (last 6 months)...');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('Using Template Table: CT_ET_TEMPLATES.CSDB_DEBT');
|
|
||||||
|
|
||||||
CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
|
|
||||||
pSchemaName => 'OU_CSDB',
|
|
||||||
pTableName => 'LEGACY_DEBT',
|
|
||||||
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
|
|
||||||
pBucketArea => 'DATA',
|
|
||||||
pFolderName => 'ODS/CSDB/CSDB_DEBT',
|
|
||||||
pMinDate => &cutoff_date,
|
|
||||||
pMaxDate => DATE '9999-12-31', -- Include future dates (MAX_LOAD_START can be beyond SYSDATE)
|
|
||||||
pParallelDegree => 16,
|
|
||||||
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_DEBT',
|
|
||||||
pMaxFileSize => 104857600, -- 100MB in bytes (safe for parallel execution, avoids ORA-04036)
|
|
||||||
pRegisterExport => TRUE, -- Register exported files in A_SOURCE_FILE_RECEIVED with metadata (CHECKSUM, CREATED, BYTES)
|
|
||||||
pProcessName => 'MARS-835', -- Process identifier for tracking
|
|
||||||
pJobClass => 'high' -- Oracle Scheduler job class for resource management
|
|
||||||
);
|
|
||||||
|
|
||||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_DEBT exported to DATA bucket with template column order');
|
|
||||||
END;
|
|
||||||
/
|
|
||||||
|
|
||||||
-- Export historical data to HIST bucket (Parquet)
|
|
||||||
-- NEW v2.4.0: Per-column date format handling with template table
|
|
||||||
BEGIN
|
|
||||||
DBMS_OUTPUT.PUT_LINE('Exporting LEGACY_DEBT data to HIST bucket (older than 6 months)...');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('Using Template Table: CT_ET_TEMPLATES.CSDB_DEBT');
|
DBMS_OUTPUT.PUT_LINE('Using Template Table: CT_ET_TEMPLATES.CSDB_DEBT');
|
||||||
|
|
||||||
CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
|
CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
|
||||||
@@ -128,7 +34,8 @@ BEGIN
|
|||||||
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
|
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
|
||||||
pBucketArea => 'ARCHIVE',
|
pBucketArea => 'ARCHIVE',
|
||||||
pFolderName => 'ARCHIVE/CSDB/CSDB_DEBT',
|
pFolderName => 'ARCHIVE/CSDB/CSDB_DEBT',
|
||||||
pMaxDate => &cutoff_date,
|
pMinDate => DATE '1900-01-01', -- Include all historical data
|
||||||
|
pMaxDate => DATE '9999-12-31', -- Include all future dates
|
||||||
pParallelDegree => 16,
|
pParallelDegree => 16,
|
||||||
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_DEBT',
|
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_DEBT',
|
||||||
pJobClass => 'high' -- Oracle Scheduler job class for resource management
|
pJobClass => 'high' -- Oracle Scheduler job class for resource management
|
||||||
@@ -139,110 +46,18 @@ END;
|
|||||||
/
|
/
|
||||||
|
|
||||||
PROMPT ========================================================================
|
PROMPT ========================================================================
|
||||||
PROMPT Exporting CSDB.LEGACY_DEBT_DAILY - Split DATA + HIST
|
PROMPT Exporting CSDB.LEGACY_DEBT_DAILY - HIST Only
|
||||||
PROMPT ========================================================================
|
PROMPT ========================================================================
|
||||||
PROMPT Last 6 months to DATA bucket (CSV format)
|
PROMPT ALL data to HIST bucket (Parquet with Hive-style partitioning)
|
||||||
PROMPT Older data to HIST bucket (Parquet with partitioning)
|
|
||||||
PROMPT Column mapping: A_ETL_LOAD_SET_FK to A_WORKFLOW_HISTORY_KEY
|
PROMPT Column mapping: A_ETL_LOAD_SET_FK to A_WORKFLOW_HISTORY_KEY
|
||||||
PROMPT Excluded columns: STEPID, PROGRAMNAME, PROGRAMCEILING, PROGRAMSTATUS,
|
PROMPT Excluded columns: STEPID, PROGRAMNAME, PROGRAMCEILING, PROGRAMSTATUS,
|
||||||
PROMPT ISSUERNACE21SECTOR, INSTRUMENTQUOTATIONBASIS
|
PROMPT ISSUERNACE21SECTOR, INSTRUMENTQUOTATIONBASIS
|
||||||
PROMPT ========================================================================
|
PROMPT ========================================================================
|
||||||
|
|
||||||
-- PRE-EXPORT CHECK: List existing files and count records
|
-- Export ALL data to HIST bucket (Parquet)
|
||||||
DECLARE
|
-- NEW v2.12.0: Per-column date format handling with template table, full data range
|
||||||
vFileCount NUMBER := 0;
|
|
||||||
vRecordCount NUMBER := 0;
|
|
||||||
vLocationUri VARCHAR2(1000);
|
|
||||||
BEGIN
|
BEGIN
|
||||||
-- Get bucket URI for DATA bucket
|
DBMS_OUTPUT.PUT_LINE('Exporting LEGACY_DEBT_DAILY data to HIST bucket (ALL data)...');
|
||||||
vLocationUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('DATA') || 'ODS/CSDB/CSDB_DEBT_DAILY/';
|
|
||||||
|
|
||||||
-- Count existing files
|
|
||||||
SELECT COUNT(*)
|
|
||||||
INTO vFileCount
|
|
||||||
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
|
||||||
credential_name => 'OCI$RESOURCE_PRINCIPAL',
|
|
||||||
location_uri => vLocationUri
|
|
||||||
))
|
|
||||||
WHERE object_name NOT LIKE '%/'; -- Exclude directories
|
|
||||||
|
|
||||||
IF vFileCount > 0 THEN
|
|
||||||
DBMS_OUTPUT.PUT_LINE('===============================================================================');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('PRE-EXPORT CHECK: Files already exist in DATA bucket');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('===============================================================================');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('Location: ' || vLocationUri);
|
|
||||||
DBMS_OUTPUT.PUT_LINE('Files found: ' || vFileCount);
|
|
||||||
DBMS_OUTPUT.PUT_LINE('');
|
|
||||||
|
|
||||||
-- List existing files
|
|
||||||
DBMS_OUTPUT.PUT_LINE('Existing files:');
|
|
||||||
FOR rec IN (
|
|
||||||
SELECT object_name, bytes, TO_CHAR(last_modified, 'YYYY-MM-DD HH24:MI:SS') AS modified
|
|
||||||
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
|
||||||
credential_name => 'OCI$RESOURCE_PRINCIPAL',
|
|
||||||
location_uri => vLocationUri
|
|
||||||
))
|
|
||||||
WHERE object_name NOT LIKE '%/'
|
|
||||||
ORDER BY object_name
|
|
||||||
) LOOP
|
|
||||||
DBMS_OUTPUT.PUT_LINE(' - ' || rec.object_name || ' (' || rec.bytes || ' bytes, ' || rec.modified || ')');
|
|
||||||
END LOOP;
|
|
||||||
|
|
||||||
-- Count records in external table
|
|
||||||
BEGIN
|
|
||||||
EXECUTE IMMEDIATE 'SELECT COUNT(*) FROM ODS.CSDB_DEBT_DAILY_ODS' INTO vRecordCount;
|
|
||||||
DBMS_OUTPUT.PUT_LINE('');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('-------------------------------------------------------------------------------');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('>>>');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('>>> Records currently readable via external table: ' || vRecordCount);
|
|
||||||
DBMS_OUTPUT.PUT_LINE('>>>');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('-------------------------------------------------------------------------------');
|
|
||||||
EXCEPTION
|
|
||||||
WHEN OTHERS THEN
|
|
||||||
DBMS_OUTPUT.PUT_LINE('');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('WARNING: Cannot count records in external table');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('Error: ' || SQLERRM);
|
|
||||||
END;
|
|
||||||
|
|
||||||
DBMS_OUTPUT.PUT_LINE('===============================================================================');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('');
|
|
||||||
ELSE
|
|
||||||
DBMS_OUTPUT.PUT_LINE('PRE-EXPORT CHECK: No existing files found in DATA bucket - bucket is clean');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('');
|
|
||||||
END IF;
|
|
||||||
END;
|
|
||||||
/
|
|
||||||
|
|
||||||
-- Export recent data to DATA bucket (CSV)
|
|
||||||
-- NEW v2.4.0: Per-column date format handling with template table for column order
|
|
||||||
BEGIN
|
|
||||||
DBMS_OUTPUT.PUT_LINE('Exporting LEGACY_DEBT_DAILY data to DATA bucket (last 6 months)...');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('Using Template Table: CT_ET_TEMPLATES.CSDB_DEBT_DAILY');
|
|
||||||
|
|
||||||
CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
|
|
||||||
pSchemaName => 'OU_CSDB',
|
|
||||||
pTableName => 'LEGACY_DEBT_DAILY',
|
|
||||||
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
|
|
||||||
pBucketArea => 'DATA',
|
|
||||||
pFolderName => 'ODS/CSDB/CSDB_DEBT_DAILY',
|
|
||||||
pMinDate => &cutoff_date,
|
|
||||||
pMaxDate => DATE '9999-12-31', -- Include future dates (MAX_LOAD_START can be beyond SYSDATE)
|
|
||||||
pParallelDegree => 16,
|
|
||||||
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_DEBT_DAILY',
|
|
||||||
pMaxFileSize => 104857600, -- 100MB in bytes (safe for parallel execution, avoids ORA-04036)
|
|
||||||
pRegisterExport => TRUE, -- Register exported files in A_SOURCE_FILE_RECEIVED with metadata (CHECKSUM, CREATED, BYTES)
|
|
||||||
pProcessName => 'MARS-835', -- Process identifier for tracking
|
|
||||||
pJobClass => 'high' -- Oracle Scheduler job class for resource management
|
|
||||||
);
|
|
||||||
|
|
||||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_DEBT_DAILY exported to DATA bucket with template column order');
|
|
||||||
END;
|
|
||||||
/
|
|
||||||
|
|
||||||
-- Export historical data to HIST bucket (Parquet)
|
|
||||||
-- NEW v2.4.0: Per-column date format handling with template table
|
|
||||||
BEGIN
|
|
||||||
DBMS_OUTPUT.PUT_LINE('Exporting LEGACY_DEBT_DAILY data to HIST bucket (older than 6 months)...');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('Using Template Table: CT_ET_TEMPLATES.CSDB_DEBT_DAILY');
|
DBMS_OUTPUT.PUT_LINE('Using Template Table: CT_ET_TEMPLATES.CSDB_DEBT_DAILY');
|
||||||
|
|
||||||
CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
|
CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
|
||||||
@@ -251,7 +66,8 @@ BEGIN
|
|||||||
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
|
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
|
||||||
pBucketArea => 'ARCHIVE',
|
pBucketArea => 'ARCHIVE',
|
||||||
pFolderName => 'ARCHIVE/CSDB/CSDB_DEBT_DAILY',
|
pFolderName => 'ARCHIVE/CSDB/CSDB_DEBT_DAILY',
|
||||||
pMaxDate => &cutoff_date,
|
pMinDate => DATE '1900-01-01', -- Include all historical data
|
||||||
|
pMaxDate => DATE '9999-12-31', -- Include all future dates
|
||||||
pParallelDegree => 16,
|
pParallelDegree => 16,
|
||||||
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_DEBT_DAILY',
|
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_DEBT_DAILY',
|
||||||
pJobClass => 'high' -- Oracle Scheduler job class for resource management
|
pJobClass => 'high' -- Oracle Scheduler job class for resource management
|
||||||
@@ -264,8 +80,8 @@ END;
|
|||||||
PROMPT ========================================================================
|
PROMPT ========================================================================
|
||||||
PROMPT Group 1 Export Completed
|
PROMPT Group 1 Export Completed
|
||||||
PROMPT ========================================================================
|
PROMPT ========================================================================
|
||||||
PROMPT - LEGACY_DEBT: DATA + HIST exported
|
PROMPT - LEGACY_DEBT: HIST exported (ALL data)
|
||||||
PROMPT - LEGACY_DEBT_DAILY: DATA + HIST exported
|
PROMPT - LEGACY_DEBT_DAILY: HIST exported (ALL data)
|
||||||
PROMPT ========================================================================
|
PROMPT ========================================================================
|
||||||
|
|
||||||
--=============================================================================================================================
|
--=============================================================================================================================
|
||||||
|
|||||||
@@ -1,10 +1,11 @@
|
|||||||
-- =====================================================================================
|
-- =====================================================================================
|
||||||
-- Script: 03_MARS_835_verify_exports.sql
|
-- Script: 03_MARS_835_verify_exports.sql
|
||||||
-- Purpose: Verify exported files exist in DATA and HIST buckets after export
|
-- Purpose: Verify exported files exist in HIST bucket after export (HIST-only strategy)
|
||||||
-- Author: Grzegorz Michalski
|
-- Author: Grzegorz Michalski
|
||||||
-- Created: 2025-12-17
|
-- Created: 2025-12-17
|
||||||
|
-- Updated: 2026-02-24 (Changed to HIST-only verification)
|
||||||
-- MARS Issue: MARS-835
|
-- MARS Issue: MARS-835
|
||||||
-- Target Locations: mrds_data_dev/ODS/CSDB/, mrds_hist_dev/ARCHIVE/CSDB/
|
-- Target Locations: mrds_hist_dev/ARCHIVE/CSDB/
|
||||||
-- =====================================================================================
|
-- =====================================================================================
|
||||||
|
|
||||||
SET SERVEROUTPUT ON SIZE UNLIMITED;
|
SET SERVEROUTPUT ON SIZE UNLIMITED;
|
||||||
@@ -13,17 +14,14 @@ SET VERIFY OFF;
|
|||||||
SET LINESIZE 200;
|
SET LINESIZE 200;
|
||||||
|
|
||||||
PROMPT =====================================================================================
|
PROMPT =====================================================================================
|
||||||
PROMPT MARS-835 Verification: Listing exported files in DATA and HIST buckets
|
PROMPT MARS-835 Verification: Listing exported files in HIST bucket (HIST-only strategy)
|
||||||
PROMPT =====================================================================================
|
PROMPT =====================================================================================
|
||||||
|
|
||||||
DECLARE
|
DECLARE
|
||||||
vDataBucketUri VARCHAR2(500);
|
|
||||||
vHistBucketUri VARCHAR2(500);
|
vHistBucketUri VARCHAR2(500);
|
||||||
vCredentialName VARCHAR2(100);
|
vCredentialName VARCHAR2(100);
|
||||||
vFileCount NUMBER := 0;
|
vFileCount NUMBER := 0;
|
||||||
vTotalDataFiles NUMBER := 0;
|
|
||||||
vTotalHistFiles NUMBER := 0;
|
vTotalHistFiles NUMBER := 0;
|
||||||
vTotalDataSize NUMBER := 0;
|
|
||||||
vTotalHistSize NUMBER := 0;
|
vTotalHistSize NUMBER := 0;
|
||||||
|
|
||||||
TYPE t_folder_info IS RECORD (
|
TYPE t_folder_info IS RECORD (
|
||||||
@@ -33,25 +31,18 @@ DECLARE
|
|||||||
);
|
);
|
||||||
TYPE t_folder_list IS TABLE OF t_folder_info;
|
TYPE t_folder_list IS TABLE OF t_folder_info;
|
||||||
|
|
||||||
vDataFolders t_folder_list;
|
|
||||||
vHistFolders t_folder_list;
|
vHistFolders t_folder_list;
|
||||||
BEGIN
|
BEGIN
|
||||||
-- Get bucket URIs and credential from FILE_MANAGER
|
-- Get bucket URI and credential from FILE_MANAGER
|
||||||
vDataBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('DATA');
|
|
||||||
vHistBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
|
vHistBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
|
||||||
vCredentialName := CT_MRDS.ENV_MANAGER.gvCredentialName;
|
vCredentialName := CT_MRDS.ENV_MANAGER.gvCredentialName;
|
||||||
|
|
||||||
DBMS_OUTPUT.PUT_LINE('VERIFICATION TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
|
DBMS_OUTPUT.PUT_LINE('VERIFICATION TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
|
||||||
DBMS_OUTPUT.PUT_LINE('DATA Bucket URI: ' || vDataBucketUri);
|
|
||||||
DBMS_OUTPUT.PUT_LINE('HIST Bucket URI: ' || vHistBucketUri);
|
DBMS_OUTPUT.PUT_LINE('HIST Bucket URI: ' || vHistBucketUri);
|
||||||
DBMS_OUTPUT.PUT_LINE('');
|
DBMS_OUTPUT.PUT_LINE('');
|
||||||
|
|
||||||
-- Initialize folder lists
|
-- Initialize folder list (all tables in HIST)
|
||||||
vDataFolders := t_folder_list(
|
-- Initialize folder list (all 6 tables in HIST)
|
||||||
t_folder_info('ODS/CSDB/CSDB_DEBT/', 'DEBT', 'CSV'),
|
|
||||||
t_folder_info('ODS/CSDB/CSDB_DEBT_DAILY/', 'DEBT_DAILY', 'CSV')
|
|
||||||
);
|
|
||||||
|
|
||||||
vHistFolders := t_folder_list(
|
vHistFolders := t_folder_list(
|
||||||
t_folder_info('ARCHIVE/CSDB/CSDB_DEBT/', 'DEBT', 'Parquet'),
|
t_folder_info('ARCHIVE/CSDB/CSDB_DEBT/', 'DEBT', 'Parquet'),
|
||||||
t_folder_info('ARCHIVE/CSDB/CSDB_DEBT_DAILY/', 'DEBT_DAILY', 'Parquet'),
|
t_folder_info('ARCHIVE/CSDB/CSDB_DEBT_DAILY/', 'DEBT_DAILY', 'Parquet'),
|
||||||
@@ -62,49 +53,7 @@ BEGIN
|
|||||||
);
|
);
|
||||||
|
|
||||||
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
|
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
|
||||||
DBMS_OUTPUT.PUT_LINE('Checking DATA Bucket Exports (CSV format - last 6 months)');
|
DBMS_OUTPUT.PUT_LINE('Checking HIST Bucket Exports (Parquet with Hive partitioning - ALL data)');
|
||||||
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
|
|
||||||
|
|
||||||
-- Check DATA bucket exports
|
|
||||||
FOR i IN 1..vDataFolders.COUNT LOOP
|
|
||||||
vFileCount := 0;
|
|
||||||
|
|
||||||
DBMS_OUTPUT.PUT_LINE('');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('Table: ' || vDataFolders(i).table_name || ' (' || vDataFolders(i).expected_format || ')');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('Folder: ' || vDataFolders(i).folder_name);
|
|
||||||
DBMS_OUTPUT.PUT_LINE('-------------------------------------------------------------------------------------');
|
|
||||||
|
|
||||||
BEGIN
|
|
||||||
FOR rec IN (
|
|
||||||
SELECT object_name, bytes, TO_CHAR(created, 'YYYY-MM-DD HH24:MI:SS') AS created_date
|
|
||||||
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
|
||||||
credential_name => vCredentialName,
|
|
||||||
location_uri => vDataBucketUri || vDataFolders(i).folder_name
|
|
||||||
))
|
|
||||||
WHERE object_name LIKE '%.csv'
|
|
||||||
ORDER BY created DESC
|
|
||||||
) LOOP
|
|
||||||
vFileCount := vFileCount + 1;
|
|
||||||
vTotalDataFiles := vTotalDataFiles + 1;
|
|
||||||
vTotalDataSize := vTotalDataSize + rec.bytes;
|
|
||||||
DBMS_OUTPUT.PUT_LINE(' [' || vFileCount || '] ' || rec.object_name ||
|
|
||||||
' (' || ROUND(rec.bytes/1024/1024, 2) || ' MB) - ' || rec.created_date);
|
|
||||||
END LOOP;
|
|
||||||
|
|
||||||
IF vFileCount = 0 THEN
|
|
||||||
DBMS_OUTPUT.PUT_LINE(' [ERROR] No CSV files found - Export may have failed!');
|
|
||||||
ELSE
|
|
||||||
DBMS_OUTPUT.PUT_LINE(' [SUCCESS] Found ' || vFileCount || ' CSV file(s)');
|
|
||||||
END IF;
|
|
||||||
EXCEPTION
|
|
||||||
WHEN OTHERS THEN
|
|
||||||
DBMS_OUTPUT.PUT_LINE(' [ERROR] Cannot access folder - ' || SQLERRM);
|
|
||||||
END;
|
|
||||||
END LOOP;
|
|
||||||
|
|
||||||
DBMS_OUTPUT.PUT_LINE('');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('Checking HIST Bucket Exports (Parquet with Hive partitioning)');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
|
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
|
||||||
|
|
||||||
-- Check HIST bucket exports
|
-- Check HIST bucket exports
|
||||||
@@ -155,24 +104,19 @@ BEGIN
|
|||||||
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
|
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
|
||||||
DBMS_OUTPUT.PUT_LINE('Export Verification Summary');
|
DBMS_OUTPUT.PUT_LINE('Export Verification Summary');
|
||||||
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
|
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
|
||||||
DBMS_OUTPUT.PUT_LINE('DATA Bucket (CSV):');
|
DBMS_OUTPUT.PUT_LINE('HIST Bucket (Parquet - HIST-only strategy):');
|
||||||
DBMS_OUTPUT.PUT_LINE(' - Total files: ' || vTotalDataFiles);
|
|
||||||
DBMS_OUTPUT.PUT_LINE(' - Total size: ' || ROUND(vTotalDataSize/1024/1024/1024, 2) || ' GB');
|
|
||||||
DBMS_OUTPUT.PUT_LINE(' - Expected tables: 2 (DEBT, DEBT_DAILY - last 6 months)');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('HIST Bucket (Parquet):');
|
|
||||||
DBMS_OUTPUT.PUT_LINE(' - Total files: ' || vTotalHistFiles || '+');
|
DBMS_OUTPUT.PUT_LINE(' - Total files: ' || vTotalHistFiles || '+');
|
||||||
DBMS_OUTPUT.PUT_LINE(' - Total size: ' || ROUND(vTotalHistSize/1024/1024/1024, 2) || '+ GB (sample)');
|
DBMS_OUTPUT.PUT_LINE(' - Total size: ' || ROUND(vTotalHistSize/1024/1024/1024, 2) || '+ GB (sample)');
|
||||||
DBMS_OUTPUT.PUT_LINE(' - Expected tables: 6 (all CSDB tables with historical data)');
|
DBMS_OUTPUT.PUT_LINE(' - Expected tables: 6 (all CSDB tables exported to HIST)');
|
||||||
DBMS_OUTPUT.PUT_LINE('');
|
DBMS_OUTPUT.PUT_LINE('');
|
||||||
|
|
||||||
IF vTotalDataFiles >= 2 AND vTotalHistFiles >= 6 THEN
|
IF vTotalHistFiles >= 6 THEN
|
||||||
DBMS_OUTPUT.PUT_LINE('[SUCCESS] OVERALL STATUS: Export appears SUCCESSFUL');
|
DBMS_OUTPUT.PUT_LINE('[SUCCESS] OVERALL STATUS: Export appears SUCCESSFUL');
|
||||||
DBMS_OUTPUT.PUT_LINE(' Files found in both DATA and HIST buckets');
|
DBMS_OUTPUT.PUT_LINE(' Files found in HIST bucket for all tables');
|
||||||
DBMS_OUTPUT.PUT_LINE(' Proceed to record count verification (Step 4)');
|
DBMS_OUTPUT.PUT_LINE(' Proceed to record count verification (Step 4)');
|
||||||
ELSIF vTotalDataFiles = 0 AND vTotalHistFiles = 0 THEN
|
ELSIF vTotalHistFiles = 0 THEN
|
||||||
DBMS_OUTPUT.PUT_LINE('[FAILED] OVERALL STATUS: Export FAILED');
|
DBMS_OUTPUT.PUT_LINE('[FAILED] OVERALL STATUS: Export FAILED');
|
||||||
DBMS_OUTPUT.PUT_LINE(' No files found in either bucket');
|
DBMS_OUTPUT.PUT_LINE(' No files found in HIST bucket');
|
||||||
DBMS_OUTPUT.PUT_LINE(' Review export logs for errors');
|
DBMS_OUTPUT.PUT_LINE(' Review export logs for errors');
|
||||||
ELSE
|
ELSE
|
||||||
DBMS_OUTPUT.PUT_LINE('[WARNING] OVERALL STATUS: Partial export detected');
|
DBMS_OUTPUT.PUT_LINE('[WARNING] OVERALL STATUS: Partial export detected');
|
||||||
|
|||||||
@@ -1,10 +1,11 @@
|
|||||||
-- =====================================================================================
|
-- =====================================================================================
|
||||||
-- Script: 04_MARS_835_verify_record_counts.sql
|
-- Script: 04_MARS_835_verify_record_counts.sql
|
||||||
-- Purpose: Verify record counts match between source tables and exported data
|
-- Purpose: Verify record counts match between source tables and exported data (HIST-only)
|
||||||
-- Author: Grzegorz Michalski
|
-- Author: Grzegorz Michalski
|
||||||
-- Created: 2025-12-17
|
-- Created: 2025-12-17
|
||||||
|
-- Updated: 2026-02-24 (Changed to HIST-only verification)
|
||||||
-- MARS Issue: MARS-835
|
-- MARS Issue: MARS-835
|
||||||
-- Verification: Compare OU_CSDB source tables with ODS external tables
|
-- Verification: Compare OU_CSDB source tables with ODS external tables (HIST only)
|
||||||
-- =====================================================================================
|
-- =====================================================================================
|
||||||
|
|
||||||
SET SERVEROUTPUT ON SIZE UNLIMITED;
|
SET SERVEROUTPUT ON SIZE UNLIMITED;
|
||||||
@@ -13,28 +14,23 @@ SET VERIFY OFF;
|
|||||||
SET LINESIZE 200;
|
SET LINESIZE 200;
|
||||||
|
|
||||||
PROMPT =====================================================================================
|
PROMPT =====================================================================================
|
||||||
PROMPT MARS-835 Record Count Verification
|
PROMPT MARS-835 Record Count Verification (HIST-only strategy)
|
||||||
PROMPT =====================================================================================
|
PROMPT =====================================================================================
|
||||||
PROMPT Comparing source table counts with exported external table counts
|
PROMPT Comparing source table counts with HIST external table counts
|
||||||
PROMPT =====================================================================================
|
PROMPT =====================================================================================
|
||||||
|
|
||||||
DECLARE
|
DECLARE
|
||||||
TYPE t_table_info IS RECORD (
|
TYPE t_table_info IS RECORD (
|
||||||
source_schema VARCHAR2(50),
|
source_schema VARCHAR2(50),
|
||||||
source_table VARCHAR2(100),
|
source_table VARCHAR2(100),
|
||||||
data_external_table VARCHAR2(100),
|
hist_external_table VARCHAR2(100)
|
||||||
hist_external_table VARCHAR2(100),
|
|
||||||
has_data_export BOOLEAN,
|
|
||||||
has_hist_export BOOLEAN
|
|
||||||
);
|
);
|
||||||
TYPE t_table_list IS TABLE OF t_table_info;
|
TYPE t_table_list IS TABLE OF t_table_info;
|
||||||
|
|
||||||
vTables t_table_list;
|
vTables t_table_list;
|
||||||
vSourceCount NUMBER;
|
vSourceCount NUMBER;
|
||||||
vDataCount NUMBER;
|
|
||||||
vHistCount NUMBER;
|
vHistCount NUMBER;
|
||||||
vTotalSourceCount NUMBER := 0;
|
vTotalSourceCount NUMBER := 0;
|
||||||
vTotalDataCount NUMBER := 0;
|
|
||||||
vTotalHistCount NUMBER := 0;
|
vTotalHistCount NUMBER := 0;
|
||||||
vMismatchCount NUMBER := 0;
|
vMismatchCount NUMBER := 0;
|
||||||
vSql VARCHAR2(4000);
|
vSql VARCHAR2(4000);
|
||||||
@@ -42,18 +38,18 @@ BEGIN
|
|||||||
DBMS_OUTPUT.PUT_LINE('VERIFICATION TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS'));
|
DBMS_OUTPUT.PUT_LINE('VERIFICATION TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS'));
|
||||||
DBMS_OUTPUT.PUT_LINE('');
|
DBMS_OUTPUT.PUT_LINE('');
|
||||||
|
|
||||||
-- Initialize table list with export configuration
|
-- Initialize table list (all tables HIST-only)
|
||||||
vTables := t_table_list(
|
vTables := t_table_list(
|
||||||
t_table_info('OU_CSDB', 'LEGACY_DEBT', 'ODS.CSDB_DEBT_ODS', 'ODS.CSDB_DEBT_ARCHIVE', TRUE, TRUE),
|
t_table_info('OU_CSDB', 'LEGACY_DEBT', 'ODS.CSDB_DEBT_ARCHIVE'),
|
||||||
t_table_info('OU_CSDB', 'LEGACY_DEBT_DAILY', 'ODS.CSDB_DEBT_DAILY_ODS', 'ODS.CSDB_DEBT_DAILY_ARCHIVE', TRUE, TRUE),
|
t_table_info('OU_CSDB', 'LEGACY_DEBT_DAILY', 'ODS.CSDB_DEBT_DAILY_ARCHIVE'),
|
||||||
t_table_info('OU_CSDB', 'LEGACY_INSTR_RAT_FULL', NULL, 'ODS.CSDB_INSTR_RAT_FULL_ARCHIVE', FALSE, TRUE),
|
t_table_info('OU_CSDB', 'LEGACY_INSTR_RAT_FULL', 'ODS.CSDB_INSTR_RAT_FULL_ARCHIVE'),
|
||||||
t_table_info('OU_CSDB', 'LEGACY_INSTR_DESC_FULL', NULL, 'ODS.CSDB_INSTR_DESC_FULL_ARCHIVE', FALSE, TRUE),
|
t_table_info('OU_CSDB', 'LEGACY_INSTR_DESC_FULL', 'ODS.CSDB_INSTR_DESC_FULL_ARCHIVE'),
|
||||||
t_table_info('OU_CSDB', 'LEGACY_ISSUER_RAT_FULL', NULL, 'ODS.CSDB_ISSUER_RAT_FULL_ARCHIVE', FALSE, TRUE),
|
t_table_info('OU_CSDB', 'LEGACY_ISSUER_RAT_FULL', 'ODS.CSDB_ISSUER_RAT_FULL_ARCHIVE'),
|
||||||
t_table_info('OU_CSDB', 'LEGACY_ISSUER_DESC_FULL', NULL, 'ODS.CSDB_ISSUER_DESC_FULL_ARCHIVE', FALSE, TRUE)
|
t_table_info('OU_CSDB', 'LEGACY_ISSUER_DESC_FULL', 'ODS.CSDB_ISSUER_DESC_FULL_ARCHIVE')
|
||||||
);
|
);
|
||||||
|
|
||||||
DBMS_OUTPUT.PUT_LINE('-----------------------------------------------------------------------------------------');
|
DBMS_OUTPUT.PUT_LINE('-----------------------------------------------------------------------------------------');
|
||||||
DBMS_OUTPUT.PUT_LINE('Table Name Source Count DATA Count HIST Count Status');
|
DBMS_OUTPUT.PUT_LINE('Table Name Source Count HIST Count Status');
|
||||||
DBMS_OUTPUT.PUT_LINE('-----------------------------------------------------------------------------------------');
|
DBMS_OUTPUT.PUT_LINE('-----------------------------------------------------------------------------------------');
|
||||||
|
|
||||||
FOR i IN 1..vTables.COUNT LOOP
|
FOR i IN 1..vTables.COUNT LOOP
|
||||||
@@ -70,31 +66,6 @@ BEGIN
|
|||||||
CONTINUE;
|
CONTINUE;
|
||||||
END;
|
END;
|
||||||
|
|
||||||
-- Get DATA external table count (if applicable)
|
|
||||||
IF vTables(i).has_data_export THEN
|
|
||||||
vSql := 'SELECT COUNT(*) FROM ' || vTables(i).data_external_table;
|
|
||||||
BEGIN
|
|
||||||
EXECUTE IMMEDIATE vSql INTO vDataCount;
|
|
||||||
vTotalDataCount := vTotalDataCount + vDataCount;
|
|
||||||
EXCEPTION
|
|
||||||
WHEN OTHERS THEN
|
|
||||||
-- If source table is empty (0 records), no files were exported
|
|
||||||
-- External table returns error, treat as 0
|
|
||||||
-- Acceptable error codes:
|
|
||||||
-- ORA-29913: error in executing ODCIEXTTABLEOPEN callout
|
|
||||||
-- ORA-29400: data cartridge error
|
|
||||||
-- KUP-13023: nothing matched wildcard query (no files in bucket)
|
|
||||||
-- NOTE: ORA-30653 (reject limit) is a real data quality error, not treated as empty
|
|
||||||
IF vSourceCount = 0 OR SQLCODE IN (-29913, -29400) OR SQLERRM LIKE '%KUP-13023%' THEN
|
|
||||||
vDataCount := 0;
|
|
||||||
ELSE
|
|
||||||
vDataCount := -1;
|
|
||||||
END IF;
|
|
||||||
END;
|
|
||||||
ELSE
|
|
||||||
vDataCount := NULL;
|
|
||||||
END IF;
|
|
||||||
|
|
||||||
-- Get HIST external table count
|
-- Get HIST external table count
|
||||||
vSql := 'SELECT COUNT(*) FROM ' || vTables(i).hist_external_table;
|
vSql := 'SELECT COUNT(*) FROM ' || vTables(i).hist_external_table;
|
||||||
BEGIN
|
BEGIN
|
||||||
@@ -119,18 +90,8 @@ BEGIN
|
|||||||
-- Display results
|
-- Display results
|
||||||
DECLARE
|
DECLARE
|
||||||
vStatus VARCHAR2(20);
|
vStatus VARCHAR2(20);
|
||||||
vDataDisplay VARCHAR2(17);
|
|
||||||
vHistDisplay VARCHAR2(17);
|
vHistDisplay VARCHAR2(17);
|
||||||
BEGIN
|
BEGIN
|
||||||
-- Format DATA count display
|
|
||||||
IF vDataCount IS NULL THEN
|
|
||||||
vDataDisplay := 'N/A';
|
|
||||||
ELSIF vDataCount = -1 THEN
|
|
||||||
vDataDisplay := 'ERROR';
|
|
||||||
ELSE
|
|
||||||
vDataDisplay := TO_CHAR(vDataCount, '9,999,999,999');
|
|
||||||
END IF;
|
|
||||||
|
|
||||||
-- Format HIST count display
|
-- Format HIST count display
|
||||||
IF vHistCount = -1 THEN
|
IF vHistCount = -1 THEN
|
||||||
vHistDisplay := 'ERROR';
|
vHistDisplay := 'ERROR';
|
||||||
@@ -138,35 +99,20 @@ BEGIN
|
|||||||
vHistDisplay := TO_CHAR(vHistCount, '9,999,999,999');
|
vHistDisplay := TO_CHAR(vHistCount, '9,999,999,999');
|
||||||
END IF;
|
END IF;
|
||||||
|
|
||||||
-- Determine status
|
-- Determine status (HIST only: check HIST = SOURCE)
|
||||||
IF vTables(i).has_data_export THEN
|
IF vHistCount = vSourceCount THEN
|
||||||
-- Split export: check DATA + HIST = SOURCE
|
vStatus := 'PASS';
|
||||||
IF (vDataCount + vHistCount) = vSourceCount THEN
|
ELSIF vHistCount = -1 THEN
|
||||||
vStatus := 'PASS';
|
vStatus := 'ERROR';
|
||||||
ELSIF vDataCount = -1 OR vHistCount = -1 THEN
|
vMismatchCount := vMismatchCount + 1;
|
||||||
vStatus := 'ERROR';
|
|
||||||
vMismatchCount := vMismatchCount + 1;
|
|
||||||
ELSE
|
|
||||||
vStatus := 'MISMATCH';
|
|
||||||
vMismatchCount := vMismatchCount + 1;
|
|
||||||
END IF;
|
|
||||||
ELSE
|
ELSE
|
||||||
-- HIST only: check HIST = SOURCE
|
vStatus := 'MISMATCH';
|
||||||
IF vHistCount = vSourceCount THEN
|
vMismatchCount := vMismatchCount + 1;
|
||||||
vStatus := 'PASS';
|
|
||||||
ELSIF vHistCount = -1 THEN
|
|
||||||
vStatus := 'ERROR';
|
|
||||||
vMismatchCount := vMismatchCount + 1;
|
|
||||||
ELSE
|
|
||||||
vStatus := 'MISMATCH';
|
|
||||||
vMismatchCount := vMismatchCount + 1;
|
|
||||||
END IF;
|
|
||||||
END IF;
|
END IF;
|
||||||
|
|
||||||
DBMS_OUTPUT.PUT_LINE(
|
DBMS_OUTPUT.PUT_LINE(
|
||||||
RPAD(vTables(i).source_table, 24) ||
|
RPAD(vTables(i).source_table, 24) ||
|
||||||
LPAD(TO_CHAR(vSourceCount, '9,999,999,999'), 15) ||
|
LPAD(TO_CHAR(vSourceCount, '9,999,999,999'), 15) ||
|
||||||
LPAD(vDataDisplay, 15) ||
|
|
||||||
LPAD(vHistDisplay, 15) || ' ' ||
|
LPAD(vHistDisplay, 15) || ' ' ||
|
||||||
vStatus
|
vStatus
|
||||||
);
|
);
|
||||||
@@ -177,18 +123,16 @@ BEGIN
|
|||||||
DBMS_OUTPUT.PUT_LINE(
|
DBMS_OUTPUT.PUT_LINE(
|
||||||
RPAD('TOTALS', 24) ||
|
RPAD('TOTALS', 24) ||
|
||||||
LPAD(TO_CHAR(vTotalSourceCount, '9,999,999,999'), 15) ||
|
LPAD(TO_CHAR(vTotalSourceCount, '9,999,999,999'), 15) ||
|
||||||
LPAD(TO_CHAR(vTotalDataCount, '9,999,999,999'), 15) ||
|
|
||||||
LPAD(TO_CHAR(vTotalHistCount, '9,999,999,999'), 15)
|
LPAD(TO_CHAR(vTotalHistCount, '9,999,999,999'), 15)
|
||||||
);
|
);
|
||||||
DBMS_OUTPUT.PUT_LINE('-----------------------------------------------------------------------------------------');
|
DBMS_OUTPUT.PUT_LINE('-----------------------------------------------------------------------------------------');
|
||||||
DBMS_OUTPUT.PUT_LINE('');
|
DBMS_OUTPUT.PUT_LINE('');
|
||||||
|
|
||||||
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
|
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
|
||||||
DBMS_OUTPUT.PUT_LINE('Record Count Verification Summary');
|
DBMS_OUTPUT.PUT_LINE('Record Count Verification Summary (HIST-only strategy)');
|
||||||
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
|
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
|
||||||
DBMS_OUTPUT.PUT_LINE('Total source records: ' || TO_CHAR(vTotalSourceCount, '9,999,999,999'));
|
DBMS_OUTPUT.PUT_LINE('Total source records: ' || TO_CHAR(vTotalSourceCount, '9,999,999,999'));
|
||||||
DBMS_OUTPUT.PUT_LINE('Total DATA records: ' || TO_CHAR(vTotalDataCount, '9,999,999,999') || ' (last 6 months)');
|
DBMS_OUTPUT.PUT_LINE('Total HIST records: ' || TO_CHAR(vTotalHistCount, '9,999,999,999') || ' (all data in HIST)');
|
||||||
DBMS_OUTPUT.PUT_LINE('Total HIST records: ' || TO_CHAR(vTotalHistCount, '9,999,999,999') || ' (historical + full exports)');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('');
|
DBMS_OUTPUT.PUT_LINE('');
|
||||||
|
|
||||||
IF vMismatchCount = 0 THEN
|
IF vMismatchCount = 0 THEN
|
||||||
@@ -209,7 +153,6 @@ BEGIN
|
|||||||
DBMS_OUTPUT.PUT_LINE(' MISMATCH - Record counts differ (may be pre-existing files or export issue)');
|
DBMS_OUTPUT.PUT_LINE(' MISMATCH - Record counts differ (may be pre-existing files or export issue)');
|
||||||
DBMS_OUTPUT.PUT_LINE(' Check pre-check results to identify pre-existing files');
|
DBMS_OUTPUT.PUT_LINE(' Check pre-check results to identify pre-existing files');
|
||||||
DBMS_OUTPUT.PUT_LINE(' ERROR - Cannot access table (may not exist yet)');
|
DBMS_OUTPUT.PUT_LINE(' ERROR - Cannot access table (may not exist yet)');
|
||||||
DBMS_OUTPUT.PUT_LINE(' N/A - Not applicable (table not exported to DATA)');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
|
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
|
||||||
|
|
||||||
EXCEPTION
|
EXCEPTION
|
||||||
|
|||||||
@@ -1,68 +1,34 @@
|
|||||||
--=============================================================================================================================
|
--=============================================================================================================================
|
||||||
-- MARS-835 ROLLBACK: Delete Group 1 Exported Files (DEBT, DEBT_DAILY)
|
-- MARS-835 ROLLBACK: Delete Group 1 Exported Files (DEBT, DEBT_DAILY)
|
||||||
--=============================================================================================================================
|
--=============================================================================================================================
|
||||||
-- Purpose: Delete exported CSV and Parquet files from DATA and HIST buckets
|
-- Purpose: Delete exported Parquet files from HIST bucket (ARCHIVE only)
|
||||||
-- WARNING: This will permanently delete exported data files!
|
-- WARNING: This will permanently delete exported data files!
|
||||||
-- Author: Grzegorz Michalski
|
-- Author: Grzegorz Michalski
|
||||||
-- Date: 2025-12-17
|
-- Date: 2025-12-17
|
||||||
|
-- Updated: 2026-02-24 (Changed to HIST-only rollback, no DATA bucket)
|
||||||
-- Related: MARS-835 - CSDB Data Export Rollback
|
-- Related: MARS-835 - CSDB Data Export Rollback
|
||||||
--=============================================================================================================================
|
--=============================================================================================================================
|
||||||
|
|
||||||
SET SERVEROUTPUT ON SIZE UNLIMITED
|
SET SERVEROUTPUT ON SIZE UNLIMITED
|
||||||
|
|
||||||
PROMPT ========================================================================
|
PROMPT ========================================================================
|
||||||
PROMPT ROLLBACK: Deleting DEBT exported files
|
PROMPT ROLLBACK: Deleting DEBT exported files from HIST
|
||||||
PROMPT ========================================================================
|
PROMPT ========================================================================
|
||||||
PROMPT WARNING: This will delete files from:
|
PROMPT WARNING: This will delete files from:
|
||||||
PROMPT - DATA bucket: mrds_data_dev/ODS/CSDB/CSDB_DEBT/
|
|
||||||
PROMPT - HIST bucket: mrds_hist_dev/ARCHIVE/CSDB/CSDB_DEBT/
|
PROMPT - HIST bucket: mrds_hist_dev/ARCHIVE/CSDB/CSDB_DEBT/
|
||||||
PROMPT ========================================================================
|
PROMPT ========================================================================
|
||||||
|
|
||||||
DECLARE
|
DECLARE
|
||||||
vDataBucketUri VARCHAR2(500);
|
|
||||||
vHistBucketUri VARCHAR2(500);
|
vHistBucketUri VARCHAR2(500);
|
||||||
vCredentialName VARCHAR2(100);
|
vCredentialName VARCHAR2(100);
|
||||||
vFileCount NUMBER := 0;
|
vFileCount NUMBER := 0;
|
||||||
BEGIN
|
BEGIN
|
||||||
-- Get bucket URIs and credential
|
-- Get bucket URI and credential
|
||||||
vDataBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('DATA');
|
|
||||||
vHistBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
|
vHistBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
|
||||||
vCredentialName := CT_MRDS.ENV_MANAGER.gvCredentialName;
|
vCredentialName := CT_MRDS.ENV_MANAGER.gvCredentialName;
|
||||||
|
|
||||||
DBMS_OUTPUT.PUT_LINE('Deleting DEBT CSV files from DATA bucket...');
|
|
||||||
DBMS_OUTPUT.PUT_LINE(' Using DBMS_CLOUD.LIST_OBJECTS to scan bucket');
|
|
||||||
|
|
||||||
-- Delete CSV files for DEBT from DATA bucket using LIST_OBJECTS
|
|
||||||
FOR rec IN (
|
|
||||||
SELECT object_name
|
|
||||||
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
|
||||||
credential_name => vCredentialName,
|
|
||||||
location_uri => vDataBucketUri || 'ODS/CSDB/CSDB_DEBT/'
|
|
||||||
))
|
|
||||||
WHERE object_name LIKE 'LEGACY_DEBT%'
|
|
||||||
) LOOP
|
|
||||||
BEGIN
|
|
||||||
DBMS_CLOUD.DELETE_OBJECT(
|
|
||||||
credential_name => vCredentialName,
|
|
||||||
object_uri => vDataBucketUri || 'ODS/CSDB/CSDB_DEBT/' || rec.object_name
|
|
||||||
);
|
|
||||||
DBMS_OUTPUT.PUT_LINE(' Deleted: ' || rec.object_name);
|
|
||||||
vFileCount := vFileCount + 1;
|
|
||||||
EXCEPTION
|
|
||||||
WHEN OTHERS THEN
|
|
||||||
IF SQLCODE = -20404 THEN
|
|
||||||
DBMS_OUTPUT.PUT_LINE(' Skipped (not found): ' || rec.object_name);
|
|
||||||
ELSE
|
|
||||||
RAISE;
|
|
||||||
END IF;
|
|
||||||
END;
|
|
||||||
END LOOP;
|
|
||||||
|
|
||||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: DEBT CSV files deleted from DATA bucket (' || vFileCount || ' file(s))');
|
|
||||||
|
|
||||||
DBMS_OUTPUT.PUT_LINE('Deleting DEBT Parquet files from ARCHIVE bucket...');
|
DBMS_OUTPUT.PUT_LINE('Deleting DEBT Parquet files from ARCHIVE bucket...');
|
||||||
DBMS_OUTPUT.PUT_LINE(' Using DBMS_CLOUD.LIST_OBJECTS (Parquet files not registered)');
|
DBMS_OUTPUT.PUT_LINE(' Using DBMS_CLOUD.LIST_OBJECTS');
|
||||||
vFileCount := 0;
|
|
||||||
|
|
||||||
-- Delete Parquet files from ARCHIVE bucket using DBMS_CLOUD.LIST_OBJECTS
|
-- Delete Parquet files from ARCHIVE bucket using DBMS_CLOUD.LIST_OBJECTS
|
||||||
FOR rec IN (
|
FOR rec IN (
|
||||||
@@ -99,58 +65,23 @@ END;
|
|||||||
/
|
/
|
||||||
|
|
||||||
PROMPT ========================================================================
|
PROMPT ========================================================================
|
||||||
PROMPT ROLLBACK: Deleting DEBT_DAILY exported files
|
PROMPT ROLLBACK: Deleting DEBT_DAILY exported files from HIST
|
||||||
PROMPT ========================================================================
|
PROMPT ========================================================================
|
||||||
PROMPT WARNING: This will delete files from:
|
PROMPT WARNING: This will delete files from:
|
||||||
PROMPT - DATA bucket: mrds_data_dev/ODS/CSDB/CSDB_DEBT_DAILY/
|
|
||||||
PROMPT - HIST bucket: mrds_hist_dev/ARCHIVE/CSDB/CSDB_DEBT_DAILY/
|
PROMPT - HIST bucket: mrds_hist_dev/ARCHIVE/CSDB/CSDB_DEBT_DAILY/
|
||||||
PROMPT ========================================================================
|
PROMPT ========================================================================
|
||||||
|
|
||||||
DECLARE
|
DECLARE
|
||||||
vDataBucketUri VARCHAR2(500);
|
|
||||||
vHistBucketUri VARCHAR2(500);
|
vHistBucketUri VARCHAR2(500);
|
||||||
vCredentialName VARCHAR2(100);
|
vCredentialName VARCHAR2(100);
|
||||||
vFileCount NUMBER := 0;
|
vFileCount NUMBER := 0;
|
||||||
BEGIN
|
BEGIN
|
||||||
-- Get bucket URIs and credential
|
-- Get bucket URI and credential
|
||||||
vDataBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('DATA');
|
|
||||||
vHistBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
|
vHistBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
|
||||||
vCredentialName := CT_MRDS.ENV_MANAGER.gvCredentialName;
|
vCredentialName := CT_MRDS.ENV_MANAGER.gvCredentialName;
|
||||||
|
|
||||||
DBMS_OUTPUT.PUT_LINE('Deleting DEBT_DAILY CSV files from DATA bucket...');
|
|
||||||
DBMS_OUTPUT.PUT_LINE(' Using DBMS_CLOUD.LIST_OBJECTS to scan bucket');
|
|
||||||
|
|
||||||
-- Delete CSV files for DEBT_DAILY from DATA bucket using LIST_OBJECTS
|
|
||||||
FOR rec IN (
|
|
||||||
SELECT object_name
|
|
||||||
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
|
||||||
credential_name => vCredentialName,
|
|
||||||
location_uri => vDataBucketUri || 'ODS/CSDB/CSDB_DEBT_DAILY/'
|
|
||||||
))
|
|
||||||
WHERE object_name LIKE 'LEGACY_DEBT_DAILY%'
|
|
||||||
) LOOP
|
|
||||||
BEGIN
|
|
||||||
DBMS_CLOUD.DELETE_OBJECT(
|
|
||||||
credential_name => vCredentialName,
|
|
||||||
object_uri => vDataBucketUri || 'ODS/CSDB/CSDB_DEBT_DAILY/' || rec.object_name
|
|
||||||
);
|
|
||||||
DBMS_OUTPUT.PUT_LINE(' Deleted: ' || rec.object_name);
|
|
||||||
vFileCount := vFileCount + 1;
|
|
||||||
EXCEPTION
|
|
||||||
WHEN OTHERS THEN
|
|
||||||
IF SQLCODE = -20404 THEN
|
|
||||||
DBMS_OUTPUT.PUT_LINE(' Skipped (not found): ' || rec.object_name);
|
|
||||||
ELSE
|
|
||||||
RAISE;
|
|
||||||
END IF;
|
|
||||||
END;
|
|
||||||
END LOOP;
|
|
||||||
|
|
||||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: DEBT_DAILY CSV files deleted from DATA bucket (' || vFileCount || ' file(s))');
|
|
||||||
|
|
||||||
DBMS_OUTPUT.PUT_LINE('Deleting DEBT_DAILY Parquet files from ARCHIVE bucket...');
|
DBMS_OUTPUT.PUT_LINE('Deleting DEBT_DAILY Parquet files from ARCHIVE bucket...');
|
||||||
DBMS_OUTPUT.PUT_LINE(' Using DBMS_CLOUD.LIST_OBJECTS (Parquet files not registered)');
|
DBMS_OUTPUT.PUT_LINE(' Using DBMS_CLOUD.LIST_OBJECTS');
|
||||||
vFileCount := 0;
|
|
||||||
|
|
||||||
-- Delete Parquet files from ARCHIVE bucket using DBMS_CLOUD.LIST_OBJECTS
|
-- Delete Parquet files from ARCHIVE bucket using DBMS_CLOUD.LIST_OBJECTS
|
||||||
FOR rec IN (
|
FOR rec IN (
|
||||||
|
|||||||
@@ -1,10 +1,11 @@
|
|||||||
-- =====================================================================================
|
-- =====================================================================================
|
||||||
-- Script: 99_MARS_835_verify_rollback.sql
|
-- Script: 99_MARS_835_verify_rollback.sql
|
||||||
-- Purpose: Verify all exported files have been deleted from DATA and HIST buckets
|
-- Purpose: Verify all exported files have been deleted from HIST bucket (HIST-only strategy)
|
||||||
-- Author: Grzegorz Michalski
|
-- Author: Grzegorz Michalski
|
||||||
-- Created: 2025-12-17
|
-- Created: 2025-12-17
|
||||||
|
-- Updated: 2026-02-24 (Changed to HIST-only verification)
|
||||||
-- MARS Issue: MARS-835
|
-- MARS Issue: MARS-835
|
||||||
-- Verification: Confirm complete rollback (no CSDB files remaining)
|
-- Verification: Confirm complete rollback (no CSDB files remaining in HIST)
|
||||||
-- =====================================================================================
|
-- =====================================================================================
|
||||||
|
|
||||||
SET SERVEROUTPUT ON SIZE UNLIMITED;
|
SET SERVEROUTPUT ON SIZE UNLIMITED;
|
||||||
@@ -19,33 +20,23 @@ PROMPT Checking that all CSDB export files have been deleted
|
|||||||
PROMPT =====================================================================================
|
PROMPT =====================================================================================
|
||||||
|
|
||||||
DECLARE
|
DECLARE
|
||||||
vDataBucketUri VARCHAR2(500);
|
|
||||||
vHistBucketUri VARCHAR2(500);
|
vHistBucketUri VARCHAR2(500);
|
||||||
vCredentialName VARCHAR2(100);
|
vCredentialName VARCHAR2(100);
|
||||||
vDataFileCount NUMBER := 0;
|
|
||||||
vHistFileCount NUMBER := 0;
|
vHistFileCount NUMBER := 0;
|
||||||
vTotalFiles NUMBER := 0;
|
|
||||||
|
|
||||||
TYPE t_folder_list IS TABLE OF VARCHAR2(200);
|
TYPE t_folder_list IS TABLE OF VARCHAR2(200);
|
||||||
vDataFolders t_folder_list;
|
|
||||||
vHistFolders t_folder_list;
|
vHistFolders t_folder_list;
|
||||||
BEGIN
|
BEGIN
|
||||||
-- Get bucket URIs
|
-- Get bucket URI
|
||||||
vDataBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('DATA');
|
|
||||||
vHistBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
|
vHistBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
|
||||||
vCredentialName := CT_MRDS.ENV_MANAGER.gvCredentialName;
|
vCredentialName := CT_MRDS.ENV_MANAGER.gvCredentialName;
|
||||||
|
|
||||||
DBMS_OUTPUT.PUT_LINE('ROLLBACK VERIFICATION TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
|
DBMS_OUTPUT.PUT_LINE('ROLLBACK VERIFICATION TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
|
||||||
DBMS_OUTPUT.PUT_LINE('DATA Bucket URI: ' || vDataBucketUri);
|
|
||||||
DBMS_OUTPUT.PUT_LINE('HIST Bucket URI: ' || vHistBucketUri);
|
DBMS_OUTPUT.PUT_LINE('HIST Bucket URI: ' || vHistBucketUri);
|
||||||
DBMS_OUTPUT.PUT_LINE('');
|
DBMS_OUTPUT.PUT_LINE('');
|
||||||
|
|
||||||
-- Initialize folder lists
|
-- Initialize folder list (all 6 tables in HIST)
|
||||||
vDataFolders := t_folder_list(
|
-- Initialize folder list (all 6 tables in HIST)
|
||||||
'ODS/CSDB/CSDB_DEBT/',
|
|
||||||
'ODS/CSDB/CSDB_DEBT_DAILY/'
|
|
||||||
);
|
|
||||||
|
|
||||||
vHistFolders := t_folder_list(
|
vHistFolders := t_folder_list(
|
||||||
'ARCHIVE/CSDB/CSDB_DEBT/',
|
'ARCHIVE/CSDB/CSDB_DEBT/',
|
||||||
'ARCHIVE/CSDB/CSDB_DEBT_DAILY/',
|
'ARCHIVE/CSDB/CSDB_DEBT_DAILY/',
|
||||||
@@ -55,47 +46,6 @@ BEGIN
|
|||||||
'ARCHIVE/CSDB/CSDB_ISSUER_DESC_FULL/'
|
'ARCHIVE/CSDB/CSDB_ISSUER_DESC_FULL/'
|
||||||
);
|
);
|
||||||
|
|
||||||
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('Checking DATA Bucket (should be empty)');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
|
|
||||||
|
|
||||||
-- Check DATA bucket
|
|
||||||
FOR i IN 1..vDataFolders.COUNT LOOP
|
|
||||||
DECLARE
|
|
||||||
vCount NUMBER := 0;
|
|
||||||
BEGIN
|
|
||||||
DBMS_OUTPUT.PUT_LINE('');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('Folder: ' || vDataFolders(i));
|
|
||||||
|
|
||||||
FOR rec IN (
|
|
||||||
SELECT object_name
|
|
||||||
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
|
||||||
credential_name => vCredentialName,
|
|
||||||
location_uri => vDataBucketUri || vDataFolders(i)
|
|
||||||
))
|
|
||||||
WHERE object_name LIKE '%.csv'
|
|
||||||
) LOOP
|
|
||||||
vCount := vCount + 1;
|
|
||||||
vDataFileCount := vDataFileCount + 1;
|
|
||||||
DBMS_OUTPUT.PUT_LINE(' [FOUND] ' || rec.object_name);
|
|
||||||
END LOOP;
|
|
||||||
|
|
||||||
IF vCount = 0 THEN
|
|
||||||
DBMS_OUTPUT.PUT_LINE(' [OK] No CSV files found');
|
|
||||||
ELSE
|
|
||||||
DBMS_OUTPUT.PUT_LINE(' [INFO] Found ' || vCount || ' file(s) - may be pre-existing files from before installation');
|
|
||||||
END IF;
|
|
||||||
EXCEPTION
|
|
||||||
WHEN OTHERS THEN
|
|
||||||
IF SQLCODE = -20404 THEN
|
|
||||||
DBMS_OUTPUT.PUT_LINE(' [OK] Folder does not exist or is empty');
|
|
||||||
ELSE
|
|
||||||
DBMS_OUTPUT.PUT_LINE(' [ERROR] ' || SQLERRM);
|
|
||||||
END IF;
|
|
||||||
END;
|
|
||||||
END LOOP;
|
|
||||||
|
|
||||||
DBMS_OUTPUT.PUT_LINE('');
|
|
||||||
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
|
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
|
||||||
DBMS_OUTPUT.PUT_LINE('Checking HIST Bucket (should be empty)');
|
DBMS_OUTPUT.PUT_LINE('Checking HIST Bucket (should be empty)');
|
||||||
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
|
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
|
||||||
@@ -139,24 +89,21 @@ BEGIN
|
|||||||
END;
|
END;
|
||||||
END LOOP;
|
END LOOP;
|
||||||
|
|
||||||
vTotalFiles := vDataFileCount + vHistFileCount;
|
|
||||||
|
|
||||||
DBMS_OUTPUT.PUT_LINE('');
|
DBMS_OUTPUT.PUT_LINE('');
|
||||||
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
|
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
|
||||||
DBMS_OUTPUT.PUT_LINE('Rollback Verification Summary');
|
DBMS_OUTPUT.PUT_LINE('Rollback Verification Summary');
|
||||||
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
|
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
|
||||||
DBMS_OUTPUT.PUT_LINE('DATA bucket files remaining: ' || vDataFileCount);
|
|
||||||
DBMS_OUTPUT.PUT_LINE('HIST bucket files remaining: ' || vHistFileCount || '+');
|
DBMS_OUTPUT.PUT_LINE('HIST bucket files remaining: ' || vHistFileCount || '+');
|
||||||
DBMS_OUTPUT.PUT_LINE('Total files found: ' || vTotalFiles || '+');
|
DBMS_OUTPUT.PUT_LINE('');
|
||||||
DBMS_OUTPUT.PUT_LINE('');
|
DBMS_OUTPUT.PUT_LINE('');
|
||||||
|
|
||||||
IF vTotalFiles = 0 THEN
|
IF vHistFileCount = 0 THEN
|
||||||
DBMS_OUTPUT.PUT_LINE('[PASSED] ROLLBACK VERIFICATION PASSED');
|
DBMS_OUTPUT.PUT_LINE('[PASSED] ROLLBACK VERIFICATION PASSED');
|
||||||
DBMS_OUTPUT.PUT_LINE(' All CSDB export files have been deleted or were not created');
|
DBMS_OUTPUT.PUT_LINE(' All CSDB export files have been deleted or were not created');
|
||||||
DBMS_OUTPUT.PUT_LINE(' Buckets are clean and ready for re-export if needed');
|
DBMS_OUTPUT.PUT_LINE(' HIST bucket is clean and ready for re-export if needed');
|
||||||
ELSE
|
ELSE
|
||||||
DBMS_OUTPUT.PUT_LINE('[INFO] ROLLBACK VERIFICATION COMPLETED');
|
DBMS_OUTPUT.PUT_LINE('[INFO] ROLLBACK VERIFICATION COMPLETED');
|
||||||
DBMS_OUTPUT.PUT_LINE(' Found ' || vTotalFiles || '+ file(s) remaining in buckets');
|
DBMS_OUTPUT.PUT_LINE(' Found ' || vHistFileCount || '+ file(s) remaining in HIST bucket');
|
||||||
DBMS_OUTPUT.PUT_LINE(' NOTE: These may be pre-existing files from before installation.');
|
DBMS_OUTPUT.PUT_LINE(' NOTE: These may be pre-existing files from before installation.');
|
||||||
DBMS_OUTPUT.PUT_LINE(' Rollback only deletes files created during this export operation.');
|
DBMS_OUTPUT.PUT_LINE(' Rollback only deletes files created during this export operation.');
|
||||||
DBMS_OUTPUT.PUT_LINE(' If needed, manually verify and clean up remaining files.');
|
DBMS_OUTPUT.PUT_LINE(' If needed, manually verify and clean up remaining files.');
|
||||||
|
|||||||
@@ -1324,7 +1324,8 @@ AS
|
|||||||
rec.quoted_column_name || ' CHAR(50) DATE_FORMAT TIMESTAMP WITH TIME ZONE MASK ' || CHR(39) || NORMALIZE_DATE_FORMAT(GET_DATE_FORMAT(pTemplateTableName => pTemplateTableName, pColumnName => rec.column_name)) || CHR(39)
|
rec.quoted_column_name || ' CHAR(50) DATE_FORMAT TIMESTAMP WITH TIME ZONE MASK ' || CHR(39) || NORMALIZE_DATE_FORMAT(GET_DATE_FORMAT(pTemplateTableName => pTemplateTableName, pColumnName => rec.column_name)) || CHR(39)
|
||||||
WHEN REGEXP_SUBSTR(rec.data_type, '^[A-Z]+') = 'TIMESTAMP' THEN
|
WHEN REGEXP_SUBSTR(rec.data_type, '^[A-Z]+') = 'TIMESTAMP' THEN
|
||||||
-- Other TIMESTAMP types (without timezone)
|
-- Other TIMESTAMP types (without timezone)
|
||||||
rec.quoted_column_name || ' TIMESTAMP ' || CHR(39) || NORMALIZE_DATE_FORMAT(GET_DATE_FORMAT(pTemplateTableName => pTemplateTableName, pColumnName => rec.column_name)) || CHR(39)
|
-- SQL*Loader syntax: CHAR(length) DATE_FORMAT TIMESTAMP MASK "format" (not: TIMESTAMP 'format')
|
||||||
|
rec.quoted_column_name || ' CHAR(35) DATE_FORMAT TIMESTAMP MASK ' || CHR(39) || NORMALIZE_DATE_FORMAT(GET_DATE_FORMAT(pTemplateTableName => pTemplateTableName, pColumnName => rec.column_name)) || CHR(39)
|
||||||
WHEN rec.data_type IN ('CHAR', 'NCHAR', 'VARCHAR2', 'NVARCHAR2') THEN
|
WHEN rec.data_type IN ('CHAR', 'NCHAR', 'VARCHAR2', 'NVARCHAR2') THEN
|
||||||
-- For CSV field definitions, use data_length for CHAR() specification
|
-- For CSV field definitions, use data_length for CHAR() specification
|
||||||
rec.quoted_column_name || ' CHAR(' || rec.data_length || ')'
|
rec.quoted_column_name || ' CHAR(' || rec.data_length || ')'
|
||||||
@@ -1680,10 +1681,10 @@ AS
|
|||||||
||cgBL||pLevel||'TEMPLATE_TABLE_NAME = '||pSourceFileConfig.TEMPLATE_TABLE_NAME
|
||cgBL||pLevel||'TEMPLATE_TABLE_NAME = '||pSourceFileConfig.TEMPLATE_TABLE_NAME
|
||||||
||cgBL||pLevel||'CONTAINER_FILE_KEY = '||pSourceFileConfig.CONTAINER_FILE_KEY
|
||cgBL||pLevel||'CONTAINER_FILE_KEY = '||pSourceFileConfig.CONTAINER_FILE_KEY
|
||||||
||cgBL||pLevel||'ODS_SCHEMA_NAME = '||pSourceFileConfig.ODS_SCHEMA_NAME
|
||cgBL||pLevel||'ODS_SCHEMA_NAME = '||pSourceFileConfig.ODS_SCHEMA_NAME
|
||||||
||cgBL||pLevel||'DAYS_FOR_ARCHIVE_THRESHOLD = '||pSourceFileConfig.DAYS_FOR_ARCHIVE_THRESHOLD
|
||cgBL||pLevel||'ARCHIVE_THRESHOLD_DAYS = '||pSourceFileConfig.ARCHIVE_THRESHOLD_DAYS
|
||||||
||cgBL||pLevel||'FILES_COUNT_OVER_ARCHIVE_THRESHOLD = '||pSourceFileConfig.FILES_COUNT_OVER_ARCHIVE_THRESHOLD
|
||cgBL||pLevel||'ARCHIVE_THRESHOLD_FILES_COUNT = '||pSourceFileConfig.ARCHIVE_THRESHOLD_FILES_COUNT
|
||||||
||cgBL||pLevel||'BYTES_SUM_OVER_ARCHIVE_THRESHOLD = '||pSourceFileConfig.BYTES_SUM_OVER_ARCHIVE_THRESHOLD
|
||cgBL||pLevel||'ARCHIVE_THRESHOLD_BYTES_SUM = '||pSourceFileConfig.ARCHIVE_THRESHOLD_BYTES_SUM
|
||||||
||cgBL||pLevel||'ROWS_COUNT_OVER_ARCHIVE_THRESHOLD = '||pSourceFileConfig.ROWS_COUNT_OVER_ARCHIVE_THRESHOLD
|
||cgBL||pLevel||'ARCHIVE_THRESHOLD_ROWS_COUNT = '||pSourceFileConfig.ARCHIVE_THRESHOLD_ROWS_COUNT
|
||||||
||cgBL||pLevel||'HOURS_TO_EXPIRE_STATISTICS = '||pSourceFileConfig.HOURS_TO_EXPIRE_STATISTICS
|
||cgBL||pLevel||'HOURS_TO_EXPIRE_STATISTICS = '||pSourceFileConfig.HOURS_TO_EXPIRE_STATISTICS
|
||||||
|
|
||||||
||cgBL||pLevel||''||'--------------------------------'
|
||cgBL||pLevel||''||'--------------------------------'
|
||||||
|
|||||||
@@ -17,14 +17,16 @@ AS
|
|||||||
**/
|
**/
|
||||||
|
|
||||||
-- Package Version Information (Semantic Versioning: MAJOR.MINOR.PATCH)
|
-- Package Version Information (Semantic Versioning: MAJOR.MINOR.PATCH)
|
||||||
PACKAGE_VERSION CONSTANT VARCHAR2(10) := '3.5.0';
|
PACKAGE_VERSION CONSTANT VARCHAR2(10) := '3.5.1';
|
||||||
PACKAGE_BUILD_DATE CONSTANT VARCHAR2(20) := '2026-02-18 16:00:00';
|
PACKAGE_BUILD_DATE CONSTANT VARCHAR2(20) := '2026-02-24 13:35:00';
|
||||||
PACKAGE_AUTHOR CONSTANT VARCHAR2(100) := 'Grzegorz Michalski';
|
PACKAGE_AUTHOR CONSTANT VARCHAR2(100) := 'Grzegorz Michalski';
|
||||||
|
|
||||||
-- Version History (Latest changes first)
|
-- Version History (Latest changes first)
|
||||||
VERSION_HISTORY CONSTANT VARCHAR2(4000) :=
|
VERSION_HISTORY CONSTANT VARCHAR2(4000) :=
|
||||||
|
'3.5.1 (2026-02-24): Fixed TIMESTAMP field syntax in GENERATE_EXTERNAL_TABLE_PARAMS for SQL*Loader compatibility (CHAR(35) DATE_FORMAT TIMESTAMP MASK format)' || CHR(13)||CHR(10) ||
|
||||||
'3.5.0 (2026-02-18): MARS-1057 - Added pArea parameter for selective table creation (INBOX/ODS/ARCHIVE/ALL)' || CHR(13)||CHR(10) ||
|
'3.5.0 (2026-02-18): MARS-1057 - Added pArea parameter for selective table creation (INBOX/ODS/ARCHIVE/ALL)' || CHR(13)||CHR(10) ||
|
||||||
'3.4.0 (2025-11-27): MARS-1057 - Added CREATE_EXTERNAL_TABLES_SET and CREATE_EXTERNAL_TABLES_BATCH procedures for batch external table creation' || CHR(13)||CHR(10) ||
|
'3.4.0 (2025-11-27): MARS-1057 - Added CREATE_EXTERNAL_TABLES_SET and CREATE_EXTERNAL_TABLES_BATCH procedures for batch external table creation' || CHR(13)||CHR(10) ||
|
||||||
|
'3.3.2 (2026-02-20): MARS-828 - Fixed threshold column names in GET_DET_SOURCE_FILE_CONFIG_INFO for MARS-828 compatibility' || CHR(13)||CHR(10) ||
|
||||||
'3.3.1 (2025-11-27): MARS-1046 - Fixed ISO 8601 datetime format parsing with milliseconds and timezone (e.g., 2012-03-02T14:16:23.798+01:00)' || CHR(13)||CHR(10) ||
|
'3.3.1 (2025-11-27): MARS-1046 - Fixed ISO 8601 datetime format parsing with milliseconds and timezone (e.g., 2012-03-02T14:16:23.798+01:00)' || CHR(13)||CHR(10) ||
|
||||||
'3.3.0 (2025-11-26): MARS-1056 - Fixed VARCHAR2 definitions in GENERATE_EXTERNAL_TABLE_PARAMS to preserve CHAR/BYTE semantics from template tables' || CHR(13)||CHR(10) ||
|
'3.3.0 (2025-11-26): MARS-1056 - Fixed VARCHAR2 definitions in GENERATE_EXTERNAL_TABLE_PARAMS to preserve CHAR/BYTE semantics from template tables' || CHR(13)||CHR(10) ||
|
||||||
'3.2.1 (2025-11-24): MARS-1049 - Added pEncoding parameter support for CSV character set specification' || CHR(13)||CHR(10) ||
|
'3.2.1 (2025-11-24): MARS-1049 - Added pEncoding parameter support for CSV character set specification' || CHR(13)||CHR(10) ||
|
||||||
|
|||||||
@@ -0,0 +1,34 @@
|
|||||||
|
-- ====================================================================
|
||||||
|
-- T2_PEAK_LIQUIDITY_NEED Template Table
|
||||||
|
-- ====================================================================
|
||||||
|
-- Purpose: Template table for T2 Peak Liquidity Need data files
|
||||||
|
-- Schema: CT_ET_TEMPLATES
|
||||||
|
-- Created: 2026-02-24
|
||||||
|
-- ====================================================================
|
||||||
|
|
||||||
|
CREATE TABLE CT_ET_TEMPLATES.T2_PEAK_LIQUIDITY_NEED (
|
||||||
|
A_KEY NUMBER(38,0) NOT NULL,
|
||||||
|
A_WORKFLOW_HISTORY_KEY NUMBER(38,0) NOT NULL,
|
||||||
|
BUSINESS_DATE DATE,
|
||||||
|
CURRENCY VARCHAR2(3 CHAR),
|
||||||
|
SYSTEM_ENTITY VARCHAR2(4 CHAR),
|
||||||
|
PARTY_RIAD VARCHAR2(256 CHAR),
|
||||||
|
PARTY_BIC VARCHAR2(256 CHAR),
|
||||||
|
MRG_LEND_INDIC VARCHAR2(256 CHAR),
|
||||||
|
PEAK_LIQUIDITY_NEED NUMBER(28,10),
|
||||||
|
TIMESTAMP TIMESTAMP(6)
|
||||||
|
);
|
||||||
|
|
||||||
|
-- Column comments
|
||||||
|
COMMENT ON COLUMN CT_ET_TEMPLATES.T2_PEAK_LIQUIDITY_NEED.A_KEY IS 'Primary key';
|
||||||
|
COMMENT ON COLUMN CT_ET_TEMPLATES.T2_PEAK_LIQUIDITY_NEED.A_WORKFLOW_HISTORY_KEY IS 'Workflow history reference';
|
||||||
|
COMMENT ON COLUMN CT_ET_TEMPLATES.T2_PEAK_LIQUIDITY_NEED.BUSINESS_DATE IS 'Business date of the record';
|
||||||
|
COMMENT ON COLUMN CT_ET_TEMPLATES.T2_PEAK_LIQUIDITY_NEED.CURRENCY IS 'Currency code (ISO 4217)';
|
||||||
|
COMMENT ON COLUMN CT_ET_TEMPLATES.T2_PEAK_LIQUIDITY_NEED.SYSTEM_ENTITY IS 'System entity identifier';
|
||||||
|
COMMENT ON COLUMN CT_ET_TEMPLATES.T2_PEAK_LIQUIDITY_NEED.PARTY_RIAD IS 'Party RIAD code';
|
||||||
|
COMMENT ON COLUMN CT_ET_TEMPLATES.T2_PEAK_LIQUIDITY_NEED.PARTY_BIC IS 'Party BIC code';
|
||||||
|
COMMENT ON COLUMN CT_ET_TEMPLATES.T2_PEAK_LIQUIDITY_NEED.MRG_LEND_INDIC IS 'Marginal lending indicator';
|
||||||
|
COMMENT ON COLUMN CT_ET_TEMPLATES.T2_PEAK_LIQUIDITY_NEED.PEAK_LIQUIDITY_NEED IS 'Peak liquidity need amount';
|
||||||
|
COMMENT ON COLUMN CT_ET_TEMPLATES.T2_PEAK_LIQUIDITY_NEED.TIMESTAMP IS 'Record timestamp';
|
||||||
|
|
||||||
|
/
|
||||||
@@ -1324,7 +1324,8 @@ AS
|
|||||||
rec.quoted_column_name || ' CHAR(50) DATE_FORMAT TIMESTAMP WITH TIME ZONE MASK ' || CHR(39) || NORMALIZE_DATE_FORMAT(GET_DATE_FORMAT(pTemplateTableName => pTemplateTableName, pColumnName => rec.column_name)) || CHR(39)
|
rec.quoted_column_name || ' CHAR(50) DATE_FORMAT TIMESTAMP WITH TIME ZONE MASK ' || CHR(39) || NORMALIZE_DATE_FORMAT(GET_DATE_FORMAT(pTemplateTableName => pTemplateTableName, pColumnName => rec.column_name)) || CHR(39)
|
||||||
WHEN REGEXP_SUBSTR(rec.data_type, '^[A-Z]+') = 'TIMESTAMP' THEN
|
WHEN REGEXP_SUBSTR(rec.data_type, '^[A-Z]+') = 'TIMESTAMP' THEN
|
||||||
-- Other TIMESTAMP types (without timezone)
|
-- Other TIMESTAMP types (without timezone)
|
||||||
rec.quoted_column_name || ' TIMESTAMP ' || CHR(39) || NORMALIZE_DATE_FORMAT(GET_DATE_FORMAT(pTemplateTableName => pTemplateTableName, pColumnName => rec.column_name)) || CHR(39)
|
-- SQL*Loader syntax: CHAR(length) DATE_FORMAT TIMESTAMP MASK "format" (not: TIMESTAMP 'format')
|
||||||
|
rec.quoted_column_name || ' CHAR(35) DATE_FORMAT TIMESTAMP MASK ' || CHR(39) || NORMALIZE_DATE_FORMAT(GET_DATE_FORMAT(pTemplateTableName => pTemplateTableName, pColumnName => rec.column_name)) || CHR(39)
|
||||||
WHEN rec.data_type IN ('CHAR', 'NCHAR', 'VARCHAR2', 'NVARCHAR2') THEN
|
WHEN rec.data_type IN ('CHAR', 'NCHAR', 'VARCHAR2', 'NVARCHAR2') THEN
|
||||||
-- For CSV field definitions, use data_length for CHAR() specification
|
-- For CSV field definitions, use data_length for CHAR() specification
|
||||||
rec.quoted_column_name || ' CHAR(' || rec.data_length || ')'
|
rec.quoted_column_name || ' CHAR(' || rec.data_length || ')'
|
||||||
@@ -1968,9 +1969,23 @@ AS
|
|||||||
|
|
||||||
PROCEDURE CREATE_EXTERNAL_TABLES_SET (
|
PROCEDURE CREATE_EXTERNAL_TABLES_SET (
|
||||||
pSourceFileConfigKey IN NUMBER,
|
pSourceFileConfigKey IN NUMBER,
|
||||||
pRecreate IN BOOLEAN DEFAULT FALSE
|
pRecreate IN BOOLEAN DEFAULT FALSE,
|
||||||
|
pRestoreGrants IN BOOLEAN DEFAULT TRUE,
|
||||||
|
pArea IN VARCHAR2 DEFAULT 'ALL'
|
||||||
)
|
)
|
||||||
IS
|
IS
|
||||||
|
-- Type for storing grant information
|
||||||
|
TYPE tGrantRecord IS RECORD (
|
||||||
|
grantee VARCHAR2(128),
|
||||||
|
privilege VARCHAR2(40),
|
||||||
|
grantable VARCHAR2(3)
|
||||||
|
);
|
||||||
|
TYPE tGrantList IS TABLE OF tGrantRecord;
|
||||||
|
|
||||||
|
vInboxGrants tGrantList;
|
||||||
|
vOdsGrants tGrantList;
|
||||||
|
vArchiveGrants tGrantList;
|
||||||
|
|
||||||
vSourceKey VARCHAR2(50);
|
vSourceKey VARCHAR2(50);
|
||||||
vSourceFileId VARCHAR2(100);
|
vSourceFileId VARCHAR2(100);
|
||||||
vTableId VARCHAR2(100);
|
vTableId VARCHAR2(100);
|
||||||
@@ -1986,28 +2001,124 @@ AS
|
|||||||
vOdsPrefix VARCHAR2(500);
|
vOdsPrefix VARCHAR2(500);
|
||||||
vArchivePrefix VARCHAR2(500);
|
vArchivePrefix VARCHAR2(500);
|
||||||
|
|
||||||
vTableExists NUMBER;
|
|
||||||
vParameters VARCHAR2(4000);
|
vParameters VARCHAR2(4000);
|
||||||
|
vAreaUpper VARCHAR2(20);
|
||||||
|
|
||||||
|
-- Nested procedure to save table grants before DROP
|
||||||
|
PROCEDURE SAVE_GRANTS(pTableName VARCHAR2, pGrantList OUT tGrantList) IS
|
||||||
|
BEGIN
|
||||||
|
ENV_MANAGER.LOG_PROCESS_EVENT('Saving grants for table: ' || pTableName, 'DEBUG');
|
||||||
|
|
||||||
|
SELECT grantee, privilege, grantable
|
||||||
|
BULK COLLECT INTO pGrantList
|
||||||
|
FROM ALL_TAB_PRIVS
|
||||||
|
WHERE table_schema = SYS_CONTEXT('USERENV', 'CURRENT_SCHEMA')
|
||||||
|
AND table_name = pTableName
|
||||||
|
AND grantee NOT IN ('SYS', 'SYSTEM', 'PUBLIC') -- Exclude system accounts
|
||||||
|
ORDER BY grantee, privilege;
|
||||||
|
|
||||||
|
ENV_MANAGER.LOG_PROCESS_EVENT(
|
||||||
|
'Saved ' || pGrantList.COUNT || ' grants for table: ' || SYS_CONTEXT('USERENV', 'CURRENT_SCHEMA') || '.' || pTableName,
|
||||||
|
'INFO'
|
||||||
|
);
|
||||||
|
EXCEPTION
|
||||||
|
WHEN NO_DATA_FOUND THEN
|
||||||
|
pGrantList := tGrantList(); -- Empty list
|
||||||
|
ENV_MANAGER.LOG_PROCESS_EVENT('No grants found for table: ' || pTableName, 'INFO');
|
||||||
|
WHEN OTHERS THEN
|
||||||
|
ENV_MANAGER.LOG_PROCESS_EVENT(
|
||||||
|
'Warning: Could not save grants for ' || pTableName || ': ' || SQLERRM,
|
||||||
|
'WARNING'
|
||||||
|
);
|
||||||
|
pGrantList := tGrantList(); -- Empty list on error
|
||||||
|
END SAVE_GRANTS;
|
||||||
|
|
||||||
|
-- Nested procedure to restore table grants after CREATE
|
||||||
|
PROCEDURE RESTORE_GRANTS(pTableName VARCHAR2, pGrantList tGrantList) IS
|
||||||
|
vGrantSQL VARCHAR2(500);
|
||||||
|
vGrantCount NUMBER := 0;
|
||||||
|
vFailCount NUMBER := 0;
|
||||||
|
BEGIN
|
||||||
|
IF pGrantList IS NULL OR pGrantList.COUNT = 0 THEN
|
||||||
|
ENV_MANAGER.LOG_PROCESS_EVENT(
|
||||||
|
'No grants to restore for table: ' || pTableName,
|
||||||
|
'INFO'
|
||||||
|
);
|
||||||
|
RETURN;
|
||||||
|
END IF;
|
||||||
|
|
||||||
|
ENV_MANAGER.LOG_PROCESS_EVENT(
|
||||||
|
'Restoring ' || pGrantList.COUNT || ' grants for table: ' || pTableName,
|
||||||
|
'DEBUG'
|
||||||
|
);
|
||||||
|
|
||||||
|
FOR i IN 1..pGrantList.COUNT LOOP
|
||||||
|
BEGIN
|
||||||
|
vGrantSQL := 'GRANT ' || pGrantList(i).privilege ||
|
||||||
|
' ON ' || SYS_CONTEXT('USERENV', 'CURRENT_SCHEMA') || '.' || pTableName ||
|
||||||
|
' TO ' || pGrantList(i).grantee;
|
||||||
|
|
||||||
|
IF pGrantList(i).grantable = 'YES' THEN
|
||||||
|
vGrantSQL := vGrantSQL || ' WITH GRANT OPTION';
|
||||||
|
END IF;
|
||||||
|
|
||||||
|
EXECUTE IMMEDIATE vGrantSQL;
|
||||||
|
vGrantCount := vGrantCount + 1;
|
||||||
|
|
||||||
|
ENV_MANAGER.LOG_PROCESS_EVENT(
|
||||||
|
'Restored grant: ' || pGrantList(i).privilege ||
|
||||||
|
' TO ' || pGrantList(i).grantee ||
|
||||||
|
CASE WHEN pGrantList(i).grantable = 'YES' THEN ' WITH GRANT OPTION' ELSE '' END,
|
||||||
|
'DEBUG'
|
||||||
|
);
|
||||||
|
EXCEPTION
|
||||||
|
WHEN OTHERS THEN
|
||||||
|
vFailCount := vFailCount + 1;
|
||||||
|
ENV_MANAGER.LOG_PROCESS_EVENT(
|
||||||
|
'Warning: Could not restore grant (' || pGrantList(i).privilege ||
|
||||||
|
' TO ' || pGrantList(i).grantee || ') on ' || pTableName || ': ' || SQLERRM,
|
||||||
|
'WARNING'
|
||||||
|
);
|
||||||
|
END;
|
||||||
|
END LOOP;
|
||||||
|
|
||||||
|
ENV_MANAGER.LOG_PROCESS_EVENT(
|
||||||
|
'Restored ' || vGrantCount || ' of ' || pGrantList.COUNT ||
|
||||||
|
' grants for table: ' || SYS_CONTEXT('USERENV', 'CURRENT_SCHEMA') || '.' || pTableName ||
|
||||||
|
CASE WHEN vFailCount > 0 THEN ' (' || vFailCount || ' failed)' ELSE '' END,
|
||||||
|
'INFO'
|
||||||
|
);
|
||||||
|
END RESTORE_GRANTS;
|
||||||
|
|
||||||
PROCEDURE DROP_IF_EXISTS(pTableName VARCHAR2) IS
|
PROCEDURE DROP_IF_EXISTS(pTableName VARCHAR2) IS
|
||||||
BEGIN
|
BEGIN
|
||||||
SELECT COUNT(*) INTO vTableExists
|
ENV_MANAGER.LOG_PROCESS_EVENT('Attempting to drop table: ' || pTableName, 'DEBUG');
|
||||||
FROM ALL_TABLES
|
EXECUTE IMMEDIATE 'DROP TABLE ' || SYS_CONTEXT('USERENV', 'CURRENT_SCHEMA') || '.' || pTableName;
|
||||||
WHERE OWNER = 'ODS' AND TABLE_NAME = pTableName;
|
ENV_MANAGER.LOG_PROCESS_EVENT('Table dropped successfully: ' || pTableName, 'INFO');
|
||||||
|
|
||||||
IF vTableExists > 0 THEN
|
|
||||||
ENV_MANAGER.LOG_PROCESS_EVENT('Dropping existing table: ' || pTableName, 'INFO');
|
|
||||||
EXECUTE IMMEDIATE 'DROP TABLE ODS.' || pTableName;
|
|
||||||
END IF;
|
|
||||||
EXCEPTION
|
EXCEPTION
|
||||||
WHEN OTHERS THEN
|
WHEN OTHERS THEN
|
||||||
ENV_MANAGER.LOG_PROCESS_EVENT('Error dropping table ' || pTableName || ': ' || SQLERRM, 'WARNING');
|
IF SQLCODE = -942 THEN -- ORA-00942: table or view does not exist
|
||||||
|
ENV_MANAGER.LOG_PROCESS_EVENT('Table does not exist, skipping drop: ' || pTableName, 'INFO');
|
||||||
|
ELSE
|
||||||
|
ENV_MANAGER.LOG_PROCESS_EVENT('Error dropping table ' || pTableName || ': ' || SQLERRM, 'WARNING');
|
||||||
|
RAISE; -- Re-raise if not "table not exists" error
|
||||||
|
END IF;
|
||||||
END DROP_IF_EXISTS;
|
END DROP_IF_EXISTS;
|
||||||
|
|
||||||
BEGIN
|
BEGIN
|
||||||
|
-- Validate and normalize pArea parameter
|
||||||
|
vAreaUpper := UPPER(TRIM(pArea));
|
||||||
|
|
||||||
|
IF vAreaUpper NOT IN ('INBOX', 'ODS', 'ARCHIVE', 'ALL') THEN
|
||||||
|
vgMsgTmp := 'Invalid pArea parameter: ''' || pArea || '''. Must be one of: INBOX, ODS, ARCHIVE, ALL';
|
||||||
|
RAISE_APPLICATION_ERROR(-20010, vgMsgTmp);
|
||||||
|
END IF;
|
||||||
|
|
||||||
vParameters := ENV_MANAGER.FORMAT_PARAMETERS(SYS.ODCIVARCHAR2LIST(
|
vParameters := ENV_MANAGER.FORMAT_PARAMETERS(SYS.ODCIVARCHAR2LIST(
|
||||||
'pSourceFileConfigKey => ' || NVL(TO_CHAR(pSourceFileConfigKey), 'NULL'),
|
'pSourceFileConfigKey => ' || NVL(TO_CHAR(pSourceFileConfigKey), 'NULL'),
|
||||||
'pRecreate => ' || CASE WHEN pRecreate THEN 'TRUE' ELSE 'FALSE' END
|
'pRecreate => ' || CASE WHEN pRecreate THEN 'TRUE' ELSE 'FALSE' END,
|
||||||
|
'pRestoreGrants => ' || CASE WHEN pRestoreGrants THEN 'TRUE' ELSE 'FALSE' END,
|
||||||
|
'pArea => ''' || vAreaUpper || ''''
|
||||||
));
|
));
|
||||||
ENV_MANAGER.LOG_PROCESS_EVENT('Start CREATE_EXTERNAL_TABLES_SET', 'INFO', vParameters);
|
ENV_MANAGER.LOG_PROCESS_EVENT('Start CREATE_EXTERNAL_TABLES_SET', 'INFO', vParameters);
|
||||||
|
|
||||||
@@ -2017,7 +2128,7 @@ AS
|
|||||||
NVL(ENCODING, 'UTF8')
|
NVL(ENCODING, 'UTF8')
|
||||||
INTO vSourceKey, vSourceFileId, vTableId, vTemplateTableName,
|
INTO vSourceKey, vSourceFileId, vTableId, vTemplateTableName,
|
||||||
vEncoding
|
vEncoding
|
||||||
FROM A_SOURCE_FILE_CONFIG
|
FROM CT_MRDS.A_SOURCE_FILE_CONFIG
|
||||||
WHERE A_SOURCE_FILE_CONFIG_KEY = pSourceFileConfigKey;
|
WHERE A_SOURCE_FILE_CONFIG_KEY = pSourceFileConfigKey;
|
||||||
|
|
||||||
-- Set default delimiter (column DELIMITER does not exist in A_SOURCE_FILE_CONFIG)
|
-- Set default delimiter (column DELIMITER does not exist in A_SOURCE_FILE_CONFIG)
|
||||||
@@ -2030,9 +2141,9 @@ AS
|
|||||||
END;
|
END;
|
||||||
|
|
||||||
-- 2. Generate table names
|
-- 2. Generate table names
|
||||||
vInboxTableName := vSourceKey || '_' || vTableId || '_INBOX';
|
vInboxTableName := vTableId || '_INBOX';
|
||||||
vOdsTableName := vSourceKey || '_' || vTableId || '_ODS';
|
vOdsTableName := vTableId || '_ODS';
|
||||||
vArchiveTableName := vSourceKey || '_' || vTableId || '_ARCHIVE';
|
vArchiveTableName := vTableId || '_ARCHIVE';
|
||||||
|
|
||||||
-- 3. Generate paths (OFFICIAL PATH PATTERNS)
|
-- 3. Generate paths (OFFICIAL PATH PATTERNS)
|
||||||
vInboxPrefix := 'INBOX/' || vSourceKey || '/' || vSourceFileId || '/' || vTableId;
|
vInboxPrefix := 'INBOX/' || vSourceKey || '/' || vSourceFileId || '/' || vTableId;
|
||||||
@@ -2040,52 +2151,103 @@ AS
|
|||||||
vArchivePrefix := 'ARCHIVE/' || vSourceKey || '/' || vTableId;
|
vArchivePrefix := 'ARCHIVE/' || vSourceKey || '/' || vTableId;
|
||||||
|
|
||||||
ENV_MANAGER.LOG_PROCESS_EVENT(
|
ENV_MANAGER.LOG_PROCESS_EVENT(
|
||||||
'Creating external tables for: ' || vSourceKey || '/' || vSourceFileId || '/' || vTableId,
|
'Creating external tables for: ' || vSourceKey || '/' || vSourceFileId || '/' || vTableId ||
|
||||||
|
' (Area: ' || vAreaUpper || ')',
|
||||||
'INFO'
|
'INFO'
|
||||||
);
|
);
|
||||||
|
|
||||||
-- 4. DROP existing tables if pRecreate = TRUE
|
-- 4. DROP existing tables if pRecreate = TRUE
|
||||||
IF pRecreate THEN
|
IF pRecreate THEN
|
||||||
DROP_IF_EXISTS(vInboxTableName);
|
-- Save grants before dropping tables (if pRestoreGrants = TRUE)
|
||||||
DROP_IF_EXISTS(vOdsTableName);
|
IF pRestoreGrants THEN
|
||||||
DROP_IF_EXISTS(vArchiveTableName);
|
ENV_MANAGER.LOG_PROCESS_EVENT('Saving grants before dropping tables...', 'INFO');
|
||||||
|
|
||||||
|
IF vAreaUpper IN ('INBOX', 'ALL') THEN
|
||||||
|
SAVE_GRANTS(vInboxTableName, vInboxGrants);
|
||||||
|
END IF;
|
||||||
|
|
||||||
|
IF vAreaUpper IN ('ODS', 'ALL') THEN
|
||||||
|
SAVE_GRANTS(vOdsTableName, vOdsGrants);
|
||||||
|
END IF;
|
||||||
|
|
||||||
|
IF vAreaUpper IN ('ARCHIVE', 'ALL') THEN
|
||||||
|
SAVE_GRANTS(vArchiveTableName, vArchiveGrants);
|
||||||
|
END IF;
|
||||||
|
END IF;
|
||||||
|
|
||||||
|
-- Drop existing tables based on pArea
|
||||||
|
IF vAreaUpper IN ('INBOX', 'ALL') THEN
|
||||||
|
DROP_IF_EXISTS(vInboxTableName);
|
||||||
|
END IF;
|
||||||
|
|
||||||
|
IF vAreaUpper IN ('ODS', 'ALL') THEN
|
||||||
|
DROP_IF_EXISTS(vOdsTableName);
|
||||||
|
END IF;
|
||||||
|
|
||||||
|
IF vAreaUpper IN ('ARCHIVE', 'ALL') THEN
|
||||||
|
DROP_IF_EXISTS(vArchiveTableName);
|
||||||
|
END IF;
|
||||||
END IF;
|
END IF;
|
||||||
|
|
||||||
-- 5. Create INBOX external table
|
-- 5. Create INBOX external table (if requested)
|
||||||
ENV_MANAGER.LOG_PROCESS_EVENT('Creating INBOX external table: ' || vInboxTableName, 'INFO');
|
IF vAreaUpper IN ('INBOX', 'ALL') THEN
|
||||||
ODS.FILE_MANAGER_ODS.CREATE_EXTERNAL_TABLE(
|
ENV_MANAGER.LOG_PROCESS_EVENT('Creating INBOX external table: ' || vInboxTableName, 'INFO');
|
||||||
pTableName => vInboxTableName,
|
CREATE_EXTERNAL_TABLE(
|
||||||
pTemplateTableName => vTemplateTableName,
|
pTableName => vInboxTableName,
|
||||||
pPrefix => vInboxPrefix,
|
pTemplateTableName => vTemplateTableName,
|
||||||
pBucketUri => ENV_MANAGER.gvInboxBucketUri,
|
pPrefix => vInboxPrefix,
|
||||||
pDelimiter => vDelimiter,
|
pBucketUri => ENV_MANAGER.gvInboxBucketUri,
|
||||||
pEncoding => vEncoding
|
pDelimiter => vDelimiter,
|
||||||
);
|
pEncoding => vEncoding
|
||||||
|
);
|
||||||
|
END IF;
|
||||||
|
|
||||||
-- 6. Create ODS external table
|
-- 6. Create ODS external table (if requested)
|
||||||
ENV_MANAGER.LOG_PROCESS_EVENT('Creating ODS external table: ' || vOdsTableName, 'INFO');
|
IF vAreaUpper IN ('ODS', 'ALL') THEN
|
||||||
ODS.FILE_MANAGER_ODS.CREATE_EXTERNAL_TABLE(
|
ENV_MANAGER.LOG_PROCESS_EVENT('Creating ODS external table: ' || vOdsTableName, 'INFO');
|
||||||
pTableName => vOdsTableName,
|
CREATE_EXTERNAL_TABLE(
|
||||||
pTemplateTableName => vTemplateTableName,
|
pTableName => vOdsTableName,
|
||||||
pPrefix => vOdsPrefix,
|
pTemplateTableName => vTemplateTableName,
|
||||||
pBucketUri => ENV_MANAGER.gvDataBucketUri,
|
pPrefix => vOdsPrefix,
|
||||||
pDelimiter => vDelimiter,
|
pBucketUri => ENV_MANAGER.gvDataBucketUri,
|
||||||
pEncoding => vEncoding
|
pDelimiter => vDelimiter,
|
||||||
);
|
pEncoding => vEncoding
|
||||||
|
);
|
||||||
|
END IF;
|
||||||
|
|
||||||
-- 7. Create ARCHIVE external table
|
-- 7. Create ARCHIVE external table (if requested)
|
||||||
ENV_MANAGER.LOG_PROCESS_EVENT('Creating ARCHIVE external table: ' || vArchiveTableName, 'INFO');
|
IF vAreaUpper IN ('ARCHIVE', 'ALL') THEN
|
||||||
ODS.FILE_MANAGER_ODS.CREATE_EXTERNAL_TABLE(
|
ENV_MANAGER.LOG_PROCESS_EVENT('Creating ARCHIVE external table: ' || vArchiveTableName, 'INFO');
|
||||||
pTableName => vArchiveTableName,
|
CREATE_EXTERNAL_TABLE(
|
||||||
pTemplateTableName => vTemplateTableName,
|
pTableName => vArchiveTableName,
|
||||||
pPrefix => vArchivePrefix,
|
pTemplateTableName => vTemplateTableName,
|
||||||
pBucketUri => ENV_MANAGER.gvArchiveBucketUri,
|
pPrefix => vArchivePrefix,
|
||||||
pDelimiter => vDelimiter,
|
pBucketUri => ENV_MANAGER.gvArchiveBucketUri,
|
||||||
pEncoding => vEncoding
|
pDelimiter => vDelimiter,
|
||||||
);
|
pEncoding => vEncoding
|
||||||
|
);
|
||||||
|
END IF;
|
||||||
|
|
||||||
|
-- 8. Restore grants after creating tables (if pRecreate = TRUE and pRestoreGrants = TRUE)
|
||||||
|
IF pRecreate AND pRestoreGrants THEN
|
||||||
|
ENV_MANAGER.LOG_PROCESS_EVENT('Restoring grants after creating tables...', 'INFO');
|
||||||
|
|
||||||
|
IF vAreaUpper IN ('INBOX', 'ALL') THEN
|
||||||
|
RESTORE_GRANTS(vInboxTableName, vInboxGrants);
|
||||||
|
END IF;
|
||||||
|
|
||||||
|
IF vAreaUpper IN ('ODS', 'ALL') THEN
|
||||||
|
RESTORE_GRANTS(vOdsTableName, vOdsGrants);
|
||||||
|
END IF;
|
||||||
|
|
||||||
|
IF vAreaUpper IN ('ARCHIVE', 'ALL') THEN
|
||||||
|
RESTORE_GRANTS(vArchiveTableName, vArchiveGrants);
|
||||||
|
END IF;
|
||||||
|
END IF;
|
||||||
|
|
||||||
ENV_MANAGER.LOG_PROCESS_EVENT(
|
ENV_MANAGER.LOG_PROCESS_EVENT(
|
||||||
'End CREATE_EXTERNAL_TABLES_SET - Successfully created all 3 external tables for config: ' || pSourceFileConfigKey,
|
'End CREATE_EXTERNAL_TABLES_SET - Successfully created external tables for config: ' ||
|
||||||
|
pSourceFileConfigKey || ' (Area: ' || vAreaUpper || ')',
|
||||||
'INFO',
|
'INFO',
|
||||||
vParameters
|
vParameters
|
||||||
);
|
);
|
||||||
@@ -2103,19 +2265,32 @@ AS
|
|||||||
pSourceKey IN VARCHAR2 DEFAULT NULL,
|
pSourceKey IN VARCHAR2 DEFAULT NULL,
|
||||||
pSourceFileId IN VARCHAR2 DEFAULT NULL,
|
pSourceFileId IN VARCHAR2 DEFAULT NULL,
|
||||||
pTableId IN VARCHAR2 DEFAULT NULL,
|
pTableId IN VARCHAR2 DEFAULT NULL,
|
||||||
pRecreate IN BOOLEAN DEFAULT FALSE
|
pRecreate IN BOOLEAN DEFAULT FALSE,
|
||||||
|
pRestoreGrants IN BOOLEAN DEFAULT TRUE,
|
||||||
|
pArea IN VARCHAR2 DEFAULT 'ALL'
|
||||||
)
|
)
|
||||||
IS
|
IS
|
||||||
vCount NUMBER := 0;
|
vCount NUMBER := 0;
|
||||||
vProcessed NUMBER := 0;
|
vProcessed NUMBER := 0;
|
||||||
vFailed NUMBER := 0;
|
vFailed NUMBER := 0;
|
||||||
vParameters VARCHAR2(4000);
|
vParameters VARCHAR2(4000);
|
||||||
|
vAreaUpper VARCHAR2(20);
|
||||||
BEGIN
|
BEGIN
|
||||||
|
-- Validate and normalize pArea parameter
|
||||||
|
vAreaUpper := UPPER(TRIM(pArea));
|
||||||
|
|
||||||
|
IF vAreaUpper NOT IN ('INBOX', 'ODS', 'ARCHIVE', 'ALL') THEN
|
||||||
|
vgMsgTmp := 'Invalid pArea parameter: ''' || pArea || '''. Must be one of: INBOX, ODS, ARCHIVE, ALL';
|
||||||
|
RAISE_APPLICATION_ERROR(-20010, vgMsgTmp);
|
||||||
|
END IF;
|
||||||
|
|
||||||
vParameters := ENV_MANAGER.FORMAT_PARAMETERS(SYS.ODCIVARCHAR2LIST(
|
vParameters := ENV_MANAGER.FORMAT_PARAMETERS(SYS.ODCIVARCHAR2LIST(
|
||||||
'pSourceKey => ''' || NVL(pSourceKey, 'NULL') || '''',
|
'pSourceKey => ''' || NVL(pSourceKey, 'NULL') || '''',
|
||||||
'pSourceFileId => ''' || NVL(pSourceFileId, 'NULL') || '''',
|
'pSourceFileId => ''' || NVL(pSourceFileId, 'NULL') || '''',
|
||||||
'pTableId => ''' || NVL(pTableId, 'NULL') || '''',
|
'pTableId => ''' || NVL(pTableId, 'NULL') || '''',
|
||||||
'pRecreate => ' || CASE WHEN pRecreate THEN 'TRUE' ELSE 'FALSE' END
|
'pRecreate => ' || CASE WHEN pRecreate THEN 'TRUE' ELSE 'FALSE' END,
|
||||||
|
'pRestoreGrants => ' || CASE WHEN pRestoreGrants THEN 'TRUE' ELSE 'FALSE' END,
|
||||||
|
'pArea => ''' || vAreaUpper || ''''
|
||||||
));
|
));
|
||||||
|
|
||||||
ENV_MANAGER.LOG_PROCESS_EVENT('Start CREATE_EXTERNAL_TABLES_BATCH', 'INFO', vParameters);
|
ENV_MANAGER.LOG_PROCESS_EVENT('Start CREATE_EXTERNAL_TABLES_BATCH', 'INFO', vParameters);
|
||||||
@@ -2123,7 +2298,7 @@ AS
|
|||||||
-- Iterate over configurations matching criteria (only INPUT files)
|
-- Iterate over configurations matching criteria (only INPUT files)
|
||||||
FOR rec IN (
|
FOR rec IN (
|
||||||
SELECT A_SOURCE_FILE_CONFIG_KEY, A_SOURCE_KEY, SOURCE_FILE_ID, TABLE_ID
|
SELECT A_SOURCE_FILE_CONFIG_KEY, A_SOURCE_KEY, SOURCE_FILE_ID, TABLE_ID
|
||||||
FROM A_SOURCE_FILE_CONFIG
|
FROM CT_MRDS.A_SOURCE_FILE_CONFIG
|
||||||
WHERE SOURCE_FILE_TYPE = 'INPUT'
|
WHERE SOURCE_FILE_TYPE = 'INPUT'
|
||||||
AND (pSourceKey IS NULL OR A_SOURCE_KEY = pSourceKey)
|
AND (pSourceKey IS NULL OR A_SOURCE_KEY = pSourceKey)
|
||||||
AND (pSourceFileId IS NULL OR SOURCE_FILE_ID = pSourceFileId)
|
AND (pSourceFileId IS NULL OR SOURCE_FILE_ID = pSourceFileId)
|
||||||
@@ -2135,14 +2310,16 @@ AS
|
|||||||
BEGIN
|
BEGIN
|
||||||
ENV_MANAGER.LOG_PROCESS_EVENT(
|
ENV_MANAGER.LOG_PROCESS_EVENT(
|
||||||
'Creating external tables set for: ' || rec.A_SOURCE_KEY || '/' ||
|
'Creating external tables set for: ' || rec.A_SOURCE_KEY || '/' ||
|
||||||
rec.SOURCE_FILE_ID || '/' || rec.TABLE_ID,
|
rec.SOURCE_FILE_ID || '/' || rec.TABLE_ID || ' (Area: ' || vAreaUpper || ')',
|
||||||
'INFO'
|
'INFO'
|
||||||
);
|
);
|
||||||
|
|
||||||
-- Call procedure to create set of 3 tables
|
-- Call procedure to create set of tables (based on pArea)
|
||||||
CREATE_EXTERNAL_TABLES_SET(
|
CREATE_EXTERNAL_TABLES_SET(
|
||||||
pSourceFileConfigKey => rec.A_SOURCE_FILE_CONFIG_KEY,
|
pSourceFileConfigKey => rec.A_SOURCE_FILE_CONFIG_KEY,
|
||||||
pRecreate => pRecreate
|
pRecreate => pRecreate,
|
||||||
|
pRestoreGrants => pRestoreGrants,
|
||||||
|
pArea => vAreaUpper
|
||||||
);
|
);
|
||||||
|
|
||||||
vProcessed := vProcessed + 1;
|
vProcessed := vProcessed + 1;
|
||||||
@@ -2161,7 +2338,7 @@ AS
|
|||||||
|
|
||||||
ENV_MANAGER.LOG_PROCESS_EVENT(
|
ENV_MANAGER.LOG_PROCESS_EVENT(
|
||||||
'End CREATE_EXTERNAL_TABLES_BATCH - Total: ' || vCount ||
|
'End CREATE_EXTERNAL_TABLES_BATCH - Total: ' || vCount ||
|
||||||
', Processed: ' || vProcessed || ', Failed: ' || vFailed,
|
', Processed: ' || vProcessed || ', Failed: ' || vFailed || ' (Area: ' || vAreaUpper || ')',
|
||||||
'INFO',
|
'INFO',
|
||||||
vParameters
|
vParameters
|
||||||
);
|
);
|
||||||
|
|||||||
@@ -17,12 +17,14 @@ AS
|
|||||||
**/
|
**/
|
||||||
|
|
||||||
-- Package Version Information (Semantic Versioning: MAJOR.MINOR.PATCH)
|
-- Package Version Information (Semantic Versioning: MAJOR.MINOR.PATCH)
|
||||||
PACKAGE_VERSION CONSTANT VARCHAR2(10) := '3.4.0';
|
PACKAGE_VERSION CONSTANT VARCHAR2(10) := '3.5.1';
|
||||||
PACKAGE_BUILD_DATE CONSTANT VARCHAR2(20) := '2025-11-27 14:00:00';
|
PACKAGE_BUILD_DATE CONSTANT VARCHAR2(20) := '2026-02-24 13:35:00';
|
||||||
PACKAGE_AUTHOR CONSTANT VARCHAR2(100) := 'Grzegorz Michalski';
|
PACKAGE_AUTHOR CONSTANT VARCHAR2(100) := 'Grzegorz Michalski';
|
||||||
|
|
||||||
-- Version History (Latest changes first)
|
-- Version History (Latest changes first)
|
||||||
VERSION_HISTORY CONSTANT VARCHAR2(4000) :=
|
VERSION_HISTORY CONSTANT VARCHAR2(4000) :=
|
||||||
|
'3.5.1 (2026-02-24): Fixed TIMESTAMP field syntax in GENERATE_EXTERNAL_TABLE_PARAMS for SQL*Loader compatibility (CHAR(35) DATE_FORMAT TIMESTAMP MASK format)' || CHR(13)||CHR(10) ||
|
||||||
|
'3.5.0 (2026-02-18): MARS-1057 - Added pArea parameter for selective table creation (INBOX/ODS/ARCHIVE/ALL)' || CHR(13)||CHR(10) ||
|
||||||
'3.4.0 (2025-11-27): MARS-1057 - Added CREATE_EXTERNAL_TABLES_SET and CREATE_EXTERNAL_TABLES_BATCH procedures for batch external table creation' || CHR(13)||CHR(10) ||
|
'3.4.0 (2025-11-27): MARS-1057 - Added CREATE_EXTERNAL_TABLES_SET and CREATE_EXTERNAL_TABLES_BATCH procedures for batch external table creation' || CHR(13)||CHR(10) ||
|
||||||
'3.3.1 (2025-11-27): MARS-1046 - Fixed ISO 8601 datetime format parsing with milliseconds and timezone (e.g., 2012-03-02T14:16:23.798+01:00)' || CHR(13)||CHR(10) ||
|
'3.3.1 (2025-11-27): MARS-1046 - Fixed ISO 8601 datetime format parsing with milliseconds and timezone (e.g., 2012-03-02T14:16:23.798+01:00)' || CHR(13)||CHR(10) ||
|
||||||
'3.3.0 (2025-11-26): MARS-1056 - Fixed VARCHAR2 definitions in GENERATE_EXTERNAL_TABLE_PARAMS to preserve CHAR/BYTE semantics from template tables' || CHR(13)||CHR(10) ||
|
'3.3.0 (2025-11-26): MARS-1056 - Fixed VARCHAR2 definitions in GENERATE_EXTERNAL_TABLE_PARAMS to preserve CHAR/BYTE semantics from template tables' || CHR(13)||CHR(10) ||
|
||||||
@@ -602,48 +604,72 @@ AS
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* @name CREATE_EXTERNAL_TABLES_SET
|
* @name CREATE_EXTERNAL_TABLES_SET
|
||||||
* @desc Creates a complete set of 3 external tables (INBOX, ODS, ARCHIVE) for a single configuration
|
* @desc Creates a complete set of external tables for a single configuration from A_SOURCE_FILE_CONFIG table.
|
||||||
* from A_SOURCE_FILE_CONFIG table. Automatically generates table names and paths following
|
* Automatically generates table names and paths following official path patterns.
|
||||||
* official path patterns. Optionally drops and recreates existing tables.
|
* Optionally drops and recreates existing tables. If pRestoreGrants is TRUE, saves and restores table grants.
|
||||||
|
* The pArea parameter allows selective table creation.
|
||||||
* @param pSourceFileConfigKey - Primary key from A_SOURCE_FILE_CONFIG table
|
* @param pSourceFileConfigKey - Primary key from A_SOURCE_FILE_CONFIG table
|
||||||
* @param pRecreate - If TRUE, drops existing tables before creating new ones; if FALSE, fails if tables exist
|
* @param pRecreate - If TRUE, drops existing tables before creating new ones; if FALSE, fails if tables exist
|
||||||
* @example BEGIN
|
* @param pRestoreGrants - If TRUE, saves grants before DROP and restores after CREATE (only when pRecreate=TRUE)
|
||||||
|
* Uses DBA_TAB_PRIVS - requires SELECT ANY DICTIONARY or SELECT ON DBA_TAB_PRIVS privilege
|
||||||
|
* @param pArea - Specifies which tables to create: 'INBOX', 'ODS', 'ARCHIVE', or 'ALL' (default)
|
||||||
|
* 'INBOX' - creates only INBOX table
|
||||||
|
* 'ODS' - creates only ODS table
|
||||||
|
* 'ARCHIVE' - creates only ARCHIVE table
|
||||||
|
* 'ALL' - creates all three tables (default)
|
||||||
|
* @example -- Create only INBOX table
|
||||||
|
* BEGIN
|
||||||
* FILE_MANAGER.CREATE_EXTERNAL_TABLES_SET(
|
* FILE_MANAGER.CREATE_EXTERNAL_TABLES_SET(
|
||||||
* pSourceFileConfigKey => 123,
|
* pSourceFileConfigKey => 123,
|
||||||
* pRecreate => FALSE
|
* pArea => 'INBOX'
|
||||||
* );
|
* );
|
||||||
* END;
|
* END;
|
||||||
* @ex_rslt Creates three external tables in ODS schema:
|
*
|
||||||
* - C2D_A_UC_DISSEM_METADATA_LOADS_INBOX
|
* -- Create all tables with grant preservation
|
||||||
* - C2D_A_UC_DISSEM_METADATA_LOADS_ODS
|
* BEGIN
|
||||||
* - C2D_A_UC_DISSEM_METADATA_LOADS_ARCHIVE
|
* FILE_MANAGER.CREATE_EXTERNAL_TABLES_SET(
|
||||||
|
* pSourceFileConfigKey => 123,
|
||||||
|
* pRecreate => TRUE,
|
||||||
|
* pRestoreGrants => TRUE,
|
||||||
|
* pArea => 'ALL'
|
||||||
|
* );
|
||||||
|
* END;
|
||||||
|
* @ex_rslt Creates external table(s) in ODS schema based on pArea parameter
|
||||||
**/
|
**/
|
||||||
PROCEDURE CREATE_EXTERNAL_TABLES_SET (
|
PROCEDURE CREATE_EXTERNAL_TABLES_SET (
|
||||||
pSourceFileConfigKey IN NUMBER,
|
pSourceFileConfigKey IN NUMBER,
|
||||||
pRecreate IN BOOLEAN DEFAULT FALSE
|
pRecreate IN BOOLEAN DEFAULT FALSE,
|
||||||
|
pRestoreGrants IN BOOLEAN DEFAULT TRUE,
|
||||||
|
pArea IN VARCHAR2 DEFAULT 'ALL'
|
||||||
);
|
);
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @name CREATE_EXTERNAL_TABLES_BATCH
|
* @name CREATE_EXTERNAL_TABLES_BATCH
|
||||||
* @desc Creates external table sets for multiple configurations based on filter criteria.
|
* @desc Creates external table sets for multiple configurations based on filter criteria.
|
||||||
* Processes only INPUT type files from A_SOURCE_FILE_CONFIG. Creates 3 tables per configuration
|
* Processes only INPUT type files from A_SOURCE_FILE_CONFIG. Creates tables based on pArea parameter
|
||||||
* (INBOX, ODS, ARCHIVE). Continues processing even if individual sets fail.
|
* (INBOX, ODS, ARCHIVE, or ALL). Continues processing even if individual sets fail.
|
||||||
|
* If pRestoreGrants is TRUE, saves and restores table grants during recreate operations.
|
||||||
* @param pSourceKey - Filter by A_SOURCE_KEY (NULL = all sources)
|
* @param pSourceKey - Filter by A_SOURCE_KEY (NULL = all sources)
|
||||||
* @param pSourceFileId - Filter by SOURCE_FILE_ID (NULL = all file types)
|
* @param pSourceFileId - Filter by SOURCE_FILE_ID (NULL = all file types)
|
||||||
* @param pTableId - Filter by TABLE_ID (NULL = all tables)
|
* @param pTableId - Filter by TABLE_ID (NULL = all tables)
|
||||||
* @param pRecreate - If TRUE, drops and recreates existing tables; if FALSE, skips if tables exist
|
* @param pRecreate - If TRUE, drops and recreates existing tables; if FALSE, skips if tables exist
|
||||||
* @example -- Create all external tables for C2D source
|
* @param pRestoreGrants - If TRUE, saves grants before DROP and restores after CREATE (only when pRecreate=TRUE)
|
||||||
|
* Uses DBA_TAB_PRIVS - requires SELECT ANY DICTIONARY or SELECT ON DBA_TAB_PRIVS privilege
|
||||||
|
* @param pArea - Specifies which tables to create: 'INBOX', 'ODS', 'ARCHIVE', or 'ALL' (default)
|
||||||
|
* @example -- Create only INBOX tables for C2D source
|
||||||
* BEGIN
|
* BEGIN
|
||||||
* FILE_MANAGER.CREATE_EXTERNAL_TABLES_BATCH(
|
* FILE_MANAGER.CREATE_EXTERNAL_TABLES_BATCH(
|
||||||
* pSourceKey => 'C2D',
|
* pSourceKey => 'C2D',
|
||||||
* pRecreate => FALSE
|
* pArea => 'INBOX'
|
||||||
* );
|
* );
|
||||||
* END;
|
* END;
|
||||||
*
|
*
|
||||||
* -- Recreate all external tables for all sources
|
* -- Create all external tables for all sources with grant preservation
|
||||||
* BEGIN
|
* BEGIN
|
||||||
* FILE_MANAGER.CREATE_EXTERNAL_TABLES_BATCH(
|
* FILE_MANAGER.CREATE_EXTERNAL_TABLES_BATCH(
|
||||||
* pRecreate => TRUE
|
* pRecreate => TRUE,
|
||||||
|
* pRestoreGrants => TRUE,
|
||||||
|
* pArea => 'ALL'
|
||||||
* );
|
* );
|
||||||
* END;
|
* END;
|
||||||
* @ex_rslt Returns summary: Total: 10, Processed: 9, Failed: 1
|
* @ex_rslt Returns summary: Total: 10, Processed: 9, Failed: 1
|
||||||
@@ -652,7 +678,9 @@ AS
|
|||||||
pSourceKey IN VARCHAR2 DEFAULT NULL,
|
pSourceKey IN VARCHAR2 DEFAULT NULL,
|
||||||
pSourceFileId IN VARCHAR2 DEFAULT NULL,
|
pSourceFileId IN VARCHAR2 DEFAULT NULL,
|
||||||
pTableId IN VARCHAR2 DEFAULT NULL,
|
pTableId IN VARCHAR2 DEFAULT NULL,
|
||||||
pRecreate IN BOOLEAN DEFAULT FALSE
|
pRecreate IN BOOLEAN DEFAULT FALSE,
|
||||||
|
pRestoreGrants IN BOOLEAN DEFAULT TRUE,
|
||||||
|
pArea IN VARCHAR2 DEFAULT 'ALL'
|
||||||
);
|
);
|
||||||
|
|
||||||
---------------------------------------------------------------------------------------------------------------------------
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
|
|||||||
@@ -56,6 +56,120 @@ AS
|
|||||||
RAISE;
|
RAISE;
|
||||||
END CREATE_EXTERNAL_TABLE;
|
END CREATE_EXTERNAL_TABLE;
|
||||||
|
|
||||||
|
----------------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/**
|
||||||
|
* CREATE_EXTERNAL_TABLES_SET - Wrapper for CT_MRDS.FILE_MANAGER.CREATE_EXTERNAL_TABLES_SET
|
||||||
|
*/
|
||||||
|
PROCEDURE CREATE_EXTERNAL_TABLES_SET (
|
||||||
|
pSourceFileConfigKey IN NUMBER,
|
||||||
|
pRecreate IN BOOLEAN DEFAULT FALSE,
|
||||||
|
pRestoreGrants IN BOOLEAN DEFAULT TRUE,
|
||||||
|
pArea IN VARCHAR2 DEFAULT 'ALL'
|
||||||
|
)
|
||||||
|
IS
|
||||||
|
vParameters CT_MRDS.A_PROCESS_LOG.PROCEDURE_PARAMETERS%TYPE;
|
||||||
|
vRecreateStr VARCHAR2(10);
|
||||||
|
vRestoreGrantsStr VARCHAR2(10);
|
||||||
|
BEGIN
|
||||||
|
-- Convert BOOLEAN to VARCHAR2 for logging
|
||||||
|
vRecreateStr := CASE WHEN pRecreate THEN 'TRUE' ELSE 'FALSE' END;
|
||||||
|
vRestoreGrantsStr := CASE WHEN pRestoreGrants THEN 'TRUE' ELSE 'FALSE' END;
|
||||||
|
|
||||||
|
-- Log the start of the procedure
|
||||||
|
vParameters := CT_MRDS.ENV_MANAGER.FORMAT_PARAMETERS(
|
||||||
|
SYS.ODCIVARCHAR2LIST(
|
||||||
|
'pSourceFileConfigKey => ' || pSourceFileConfigKey,
|
||||||
|
'pRecreate => ' || vRecreateStr,
|
||||||
|
'pRestoreGrants => ' || vRestoreGrantsStr,
|
||||||
|
'pArea => ''' || pArea || ''''
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT('Start FILE_MANAGER_ODS.CREATE_EXTERNAL_TABLES_SET', 'INFO', vParameters);
|
||||||
|
|
||||||
|
-- Call the original CT_MRDS.FILE_MANAGER.CREATE_EXTERNAL_TABLES_SET procedure
|
||||||
|
-- This ensures all logic remains centralized in the original package
|
||||||
|
-- and ODS wrapper simply delegates execution with DEFINER rights
|
||||||
|
CT_MRDS.FILE_MANAGER.CREATE_EXTERNAL_TABLES_SET(
|
||||||
|
pSourceFileConfigKey => pSourceFileConfigKey,
|
||||||
|
pRecreate => pRecreate,
|
||||||
|
pRestoreGrants => pRestoreGrants,
|
||||||
|
pArea => pArea
|
||||||
|
);
|
||||||
|
|
||||||
|
CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT('End FILE_MANAGER_ODS.CREATE_EXTERNAL_TABLES_SET', 'INFO', vParameters);
|
||||||
|
|
||||||
|
EXCEPTION
|
||||||
|
WHEN OTHERS THEN
|
||||||
|
CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT(
|
||||||
|
'Error in ODS.FILE_MANAGER_ODS.CREATE_EXTERNAL_TABLES_SET: ' || SQLERRM,
|
||||||
|
'ERROR',
|
||||||
|
vParameters
|
||||||
|
);
|
||||||
|
RAISE;
|
||||||
|
END CREATE_EXTERNAL_TABLES_SET;
|
||||||
|
|
||||||
|
----------------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/**
|
||||||
|
* CREATE_EXTERNAL_TABLES_BATCH - Wrapper for CT_MRDS.FILE_MANAGER.CREATE_EXTERNAL_TABLES_BATCH
|
||||||
|
*/
|
||||||
|
PROCEDURE CREATE_EXTERNAL_TABLES_BATCH (
|
||||||
|
pSourceKey IN VARCHAR2 DEFAULT NULL,
|
||||||
|
pSourceFileId IN VARCHAR2 DEFAULT NULL,
|
||||||
|
pTableId IN VARCHAR2 DEFAULT NULL,
|
||||||
|
pRecreate IN BOOLEAN DEFAULT FALSE,
|
||||||
|
pRestoreGrants IN BOOLEAN DEFAULT TRUE,
|
||||||
|
pArea IN VARCHAR2 DEFAULT 'ALL'
|
||||||
|
)
|
||||||
|
IS
|
||||||
|
vParameters CT_MRDS.A_PROCESS_LOG.PROCEDURE_PARAMETERS%TYPE;
|
||||||
|
vRecreateStr VARCHAR2(10);
|
||||||
|
vRestoreGrantsStr VARCHAR2(10);
|
||||||
|
BEGIN
|
||||||
|
-- Convert BOOLEAN to VARCHAR2 for logging
|
||||||
|
vRecreateStr := CASE WHEN pRecreate THEN 'TRUE' ELSE 'FALSE' END;
|
||||||
|
vRestoreGrantsStr := CASE WHEN pRestoreGrants THEN 'TRUE' ELSE 'FALSE' END;
|
||||||
|
|
||||||
|
-- Log the start of the procedure
|
||||||
|
vParameters := CT_MRDS.ENV_MANAGER.FORMAT_PARAMETERS(
|
||||||
|
SYS.ODCIVARCHAR2LIST(
|
||||||
|
'pSourceKey => ''' || NVL(pSourceKey, 'NULL') || '''',
|
||||||
|
'pSourceFileId => ''' || NVL(pSourceFileId, 'NULL') || '''',
|
||||||
|
'pTableId => ''' || NVL(pTableId, 'NULL') || '''',
|
||||||
|
'pRecreate => ' || vRecreateStr,
|
||||||
|
'pRestoreGrants => ' || vRestoreGrantsStr,
|
||||||
|
'pArea => ''' || pArea || ''''
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT('Start FILE_MANAGER_ODS.CREATE_EXTERNAL_TABLES_BATCH', 'INFO', vParameters);
|
||||||
|
|
||||||
|
-- Call the original CT_MRDS.FILE_MANAGER.CREATE_EXTERNAL_TABLES_BATCH procedure
|
||||||
|
-- This ensures all logic remains centralized in the original package
|
||||||
|
-- and ODS wrapper simply delegates execution with DEFINER rights
|
||||||
|
CT_MRDS.FILE_MANAGER.CREATE_EXTERNAL_TABLES_BATCH(
|
||||||
|
pSourceKey => pSourceKey,
|
||||||
|
pSourceFileId => pSourceFileId,
|
||||||
|
pTableId => pTableId,
|
||||||
|
pRecreate => pRecreate,
|
||||||
|
pRestoreGrants => pRestoreGrants,
|
||||||
|
pArea => pArea
|
||||||
|
);
|
||||||
|
|
||||||
|
CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT('End FILE_MANAGER_ODS.CREATE_EXTERNAL_TABLES_BATCH', 'INFO', vParameters);
|
||||||
|
|
||||||
|
EXCEPTION
|
||||||
|
WHEN OTHERS THEN
|
||||||
|
CT_MRDS.ENV_MANAGER.LOG_PROCESS_EVENT(
|
||||||
|
'Error in ODS.FILE_MANAGER_ODS.CREATE_EXTERNAL_TABLES_BATCH: ' || SQLERRM,
|
||||||
|
'ERROR',
|
||||||
|
vParameters
|
||||||
|
);
|
||||||
|
RAISE;
|
||||||
|
END CREATE_EXTERNAL_TABLES_BATCH;
|
||||||
|
|
||||||
----------------------------------------------------------------------------------------------------
|
----------------------------------------------------------------------------------------------------
|
||||||
-- PACKAGE VERSION MANAGEMENT FUNCTIONS IMPLEMENTATION
|
-- PACKAGE VERSION MANAGEMENT FUNCTIONS IMPLEMENTATION
|
||||||
----------------------------------------------------------------------------------------------------
|
----------------------------------------------------------------------------------------------------
|
||||||
|
|||||||
@@ -16,12 +16,15 @@ AS
|
|||||||
*/
|
*/
|
||||||
|
|
||||||
-- Package Version Information (Semantic Versioning: MAJOR.MINOR.PATCH)
|
-- Package Version Information (Semantic Versioning: MAJOR.MINOR.PATCH)
|
||||||
PACKAGE_VERSION CONSTANT VARCHAR2(10) := '2.1.0';
|
PACKAGE_VERSION CONSTANT VARCHAR2(10) := '2.4.0';
|
||||||
PACKAGE_BUILD_DATE CONSTANT VARCHAR2(20) := '2025-11-24 11:50:00';
|
PACKAGE_BUILD_DATE CONSTANT VARCHAR2(20) := '2026-02-18 16:00:00';
|
||||||
PACKAGE_AUTHOR CONSTANT VARCHAR2(100) := 'Grzegorz Michalski';
|
PACKAGE_AUTHOR CONSTANT VARCHAR2(100) := 'Grzegorz Michalski';
|
||||||
|
|
||||||
-- Version History (Latest changes first)
|
-- Version History (Latest changes first)
|
||||||
VERSION_HISTORY CONSTANT VARCHAR2(4000) :=
|
VERSION_HISTORY CONSTANT VARCHAR2(4000) :=
|
||||||
|
'2.4.0 (2026-02-18): MARS-1057 - Added pArea parameter for selective table creation (INBOX/ODS/ARCHIVE/ALL)' || CHR(13)||CHR(10) ||
|
||||||
|
'2.3.0 (2026-02-18): MARS-1057 - Added pRestoreGrants parameter support for grant preservation during table recreate' || CHR(13)||CHR(10) ||
|
||||||
|
'2.2.0 (2025-11-27): MARS-1057 - Added CREATE_EXTERNAL_TABLES_SET and CREATE_EXTERNAL_TABLES_BATCH wrappers for batch external table creation' || CHR(13)||CHR(10) ||
|
||||||
'2.1.0 (2025-11-24): MARS-1049 - Added pEncoding parameter support for CSV character set specification' || CHR(13)||CHR(10) ||
|
'2.1.0 (2025-11-24): MARS-1049 - Added pEncoding parameter support for CSV character set specification' || CHR(13)||CHR(10) ||
|
||||||
'2.0.0 (2025-10-22): Added package versioning system using centralized ENV_MANAGER functions' || CHR(13)||CHR(10) ||
|
'2.0.0 (2025-10-22): Added package versioning system using centralized ENV_MANAGER functions' || CHR(13)||CHR(10) ||
|
||||||
'1.5.0 (2025-10-12): Enhanced external table creation with official path patterns support' || CHR(13)||CHR(10) ||
|
'1.5.0 (2025-10-12): Enhanced external table creation with official path patterns support' || CHR(13)||CHR(10) ||
|
||||||
@@ -60,6 +63,76 @@ AS
|
|||||||
pEncoding IN VARCHAR2 DEFAULT NULL
|
pEncoding IN VARCHAR2 DEFAULT NULL
|
||||||
);
|
);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name CREATE_EXTERNAL_TABLES_SET
|
||||||
|
* @desc Creates a complete set of external tables for a single configuration from A_SOURCE_FILE_CONFIG table.
|
||||||
|
* Automatically generates table names and paths following official path patterns.
|
||||||
|
* Wrapper for CT_MRDS.FILE_MANAGER.CREATE_EXTERNAL_TABLES_SET with DEFINER rights.
|
||||||
|
* If pRestoreGrants is TRUE, saves and restores table grants during recreate operation.
|
||||||
|
* The pArea parameter allows selective table creation.
|
||||||
|
* @param pSourceFileConfigKey - Primary key from A_SOURCE_FILE_CONFIG table
|
||||||
|
* @param pRecreate - If TRUE, drops existing tables before creating new ones; if FALSE, fails if tables exist
|
||||||
|
* @param pRestoreGrants - If TRUE, saves grants before DROP and restores after CREATE (only when pRecreate=TRUE)
|
||||||
|
* Uses DBA_TAB_PRIVS - requires SELECT ANY DICTIONARY or SELECT ON DBA_TAB_PRIVS privilege
|
||||||
|
* @param pArea - Specifies which tables to create: 'INBOX', 'ODS', 'ARCHIVE', or 'ALL' (default)
|
||||||
|
* @example -- Create only INBOX table
|
||||||
|
* EXEC ODS.FILE_MANAGER_ODS.CREATE_EXTERNAL_TABLES_SET(
|
||||||
|
* pSourceFileConfigKey => 123,
|
||||||
|
* pArea => 'INBOX'
|
||||||
|
* );
|
||||||
|
*
|
||||||
|
* -- Create all tables with grant preservation
|
||||||
|
* EXEC ODS.FILE_MANAGER_ODS.CREATE_EXTERNAL_TABLES_SET(
|
||||||
|
* pSourceFileConfigKey => 123,
|
||||||
|
* pRecreate => TRUE,
|
||||||
|
* pRestoreGrants => TRUE,
|
||||||
|
* pArea => 'ALL'
|
||||||
|
* );
|
||||||
|
* @ex_rslt Creates external table(s) in ODS schema based on pArea parameter
|
||||||
|
*/
|
||||||
|
PROCEDURE CREATE_EXTERNAL_TABLES_SET (
|
||||||
|
pSourceFileConfigKey IN NUMBER,
|
||||||
|
pRecreate IN BOOLEAN DEFAULT FALSE,
|
||||||
|
pRestoreGrants IN BOOLEAN DEFAULT TRUE,
|
||||||
|
pArea IN VARCHAR2 DEFAULT 'ALL'
|
||||||
|
);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @name CREATE_EXTERNAL_TABLES_BATCH
|
||||||
|
* @desc Creates external table sets for multiple configurations based on filter criteria.
|
||||||
|
* Processes only INPUT type files from A_SOURCE_FILE_CONFIG. Creates tables based on pArea parameter.
|
||||||
|
* Wrapper for CT_MRDS.FILE_MANAGER.CREATE_EXTERNAL_TABLES_BATCH with DEFINER rights.
|
||||||
|
* If pRestoreGrants is TRUE, saves and restores table grants during recreate operations.
|
||||||
|
* @param pSourceKey - Filter by A_SOURCE_KEY (NULL = all sources)
|
||||||
|
* @param pSourceFileId - Filter by SOURCE_FILE_ID (NULL = all file types)
|
||||||
|
* @param pTableId - Filter by TABLE_ID (NULL = all tables)
|
||||||
|
* @param pRecreate - If TRUE, drops and recreates existing tables; if FALSE, skips if tables exist
|
||||||
|
* @param pRestoreGrants - If TRUE, saves grants before DROP and restores after CREATE (only when pRecreate=TRUE)
|
||||||
|
* Uses DBA_TAB_PRIVS - requires SELECT ANY DICTIONARY or SELECT ON DBA_TAB_PRIVS privilege
|
||||||
|
* @param pArea - Specifies which tables to create: 'INBOX', 'ODS', 'ARCHIVE', or 'ALL' (default)
|
||||||
|
* @example -- Create only INBOX tables for C2D source
|
||||||
|
* EXEC ODS.FILE_MANAGER_ODS.CREATE_EXTERNAL_TABLES_BATCH(
|
||||||
|
* pSourceKey => 'C2D',
|
||||||
|
* pArea => 'INBOX'
|
||||||
|
* );
|
||||||
|
*
|
||||||
|
* -- Create all tables with grant preservation
|
||||||
|
* EXEC ODS.FILE_MANAGER_ODS.CREATE_EXTERNAL_TABLES_BATCH(
|
||||||
|
* pRecreate => TRUE,
|
||||||
|
* pRestoreGrants => TRUE,
|
||||||
|
* pArea => 'ALL'
|
||||||
|
* );
|
||||||
|
* @ex_rslt Returns summary: Total: 10, Processed: 9, Failed: 1
|
||||||
|
*/
|
||||||
|
PROCEDURE CREATE_EXTERNAL_TABLES_BATCH (
|
||||||
|
pSourceKey IN VARCHAR2 DEFAULT NULL,
|
||||||
|
pSourceFileId IN VARCHAR2 DEFAULT NULL,
|
||||||
|
pTableId IN VARCHAR2 DEFAULT NULL,
|
||||||
|
pRecreate IN BOOLEAN DEFAULT FALSE,
|
||||||
|
pRestoreGrants IN BOOLEAN DEFAULT TRUE,
|
||||||
|
pArea IN VARCHAR2 DEFAULT 'ALL'
|
||||||
|
);
|
||||||
|
|
||||||
---------------------------------------------------------------------------------------------------------------------------
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
-- PACKAGE VERSION MANAGEMENT FUNCTIONS
|
-- PACKAGE VERSION MANAGEMENT FUNCTIONS
|
||||||
---------------------------------------------------------------------------------------------------------------------------
|
---------------------------------------------------------------------------------------------------------------------------
|
||||||
@@ -95,8 +168,8 @@ AS
|
|||||||
* Uses centralized ENV_MANAGER.GET_PACKAGE_VERSION_INFO function.
|
* Uses centralized ENV_MANAGER.GET_PACKAGE_VERSION_INFO function.
|
||||||
* @example SELECT FILE_MANAGER_ODS.GET_BUILD_INFO() FROM DUAL;
|
* @example SELECT FILE_MANAGER_ODS.GET_BUILD_INFO() FROM DUAL;
|
||||||
* @ex_rslt Package: FILE_MANAGER_ODS
|
* @ex_rslt Package: FILE_MANAGER_ODS
|
||||||
* Version: 2.1.0
|
* Version: 2.2.0
|
||||||
* Build Date: 2025-11-24 11:50:00
|
* Build Date: 2025-11-27 15:00:00
|
||||||
* Author: Grzegorz Michalski
|
* Author: Grzegorz Michalski
|
||||||
**/
|
**/
|
||||||
FUNCTION GET_BUILD_INFO RETURN VARCHAR2;
|
FUNCTION GET_BUILD_INFO RETURN VARCHAR2;
|
||||||
|
|||||||
@@ -14,7 +14,7 @@ REL01_ADDITIONS
|
|||||||
MARS-826
|
MARS-826
|
||||||
|
|
||||||
-- Aktualnie pracuje nad:
|
-- Aktualnie pracuje nad:
|
||||||
MARS-828
|
MARS-828
|
||||||
|
|
||||||
-- Poniżej czeka na wdrożenie
|
-- Poniżej czeka na wdrożenie
|
||||||
REL03
|
REL03
|
||||||
|
|||||||
@@ -10,27 +10,56 @@ The FILE_ARCHIVER package provides flexible archival strategies that accommodate
|
|||||||
|
|
||||||
- **Three Archival Strategies**: THRESHOLD_BASED, MINIMUM_AGE_MONTHS (with 0=current month only), HYBRID
|
- **Three Archival Strategies**: THRESHOLD_BASED, MINIMUM_AGE_MONTHS (with 0=current month only), HYBRID
|
||||||
- **Flexible Configuration**: Per-table archival strategy configuration via A_SOURCE_FILE_CONFIG
|
- **Flexible Configuration**: Per-table archival strategy configuration via A_SOURCE_FILE_CONFIG
|
||||||
- **Backward Compatible**: Default THRESHOLD_BASED strategy maintains existing behavior
|
|
||||||
- **Validation**: Automatic validation of strategy-specific configuration requirements
|
- **Validation**: Automatic validation of strategy-specific configuration requirements
|
||||||
- **OCI Integration**: Works seamlessly with DBMS_CLOUD operations via cloud_wrapper
|
|
||||||
|
|
||||||
### Package Information
|
### Package Information
|
||||||
|
|
||||||
- **Schema**: CT_MRDS
|
- **Schema**: CT_MRDS
|
||||||
- **Package**: FILE_ARCHIVER
|
- **Package**: FILE_ARCHIVER
|
||||||
- **Current Version**: 3.2.0
|
- **Current Version**: 3.3.0
|
||||||
- **Dependencies**: ENV_MANAGER, FILE_MANAGER, cloud_wrapper, A_SOURCE_FILE_CONFIG, A_SOURCE_FILE_RECEIVED, A_WORKFLOW_HISTORY
|
- **Dependencies**: ENV_MANAGER, FILE_MANAGER, A_SOURCE_FILE_CONFIG, A_SOURCE_FILE_RECEIVED, A_WORKFLOW_HISTORY
|
||||||
|
|
||||||
### Critical Prerequisites
|
### Critical Prerequisites
|
||||||
|
|
||||||
⚠️ **IMPORTANT**: FILE_ARCHIVER requires data to be registered in `CT_MRDS.A_SOURCE_FILE_RECEIVED` table. This table is automatically populated when files are processed through the modern Airflow + DBT workflow via `FILE_MANAGER.PROCESS_SOURCE_FILE`.
|
⚠️ **IMPORTANT**: FILE_ARCHIVER requires data to be registered in `CT_MRDS.A_SOURCE_FILE_RECEIVED` table.
|
||||||
|
|
||||||
|
**For new system data (Airflow + DBT):**
|
||||||
|
- `A_SOURCE_FILE_RECEIVED` records are automatically created by `FILE_MANAGER.PROCESS_SOURCE_FILE` during file validation
|
||||||
|
- No additional configuration needed - standard workflow handles registration
|
||||||
|
|
||||||
**For legacy data migrated from Informatica + WLA system:**
|
**For legacy data migrated from Informatica + WLA system:**
|
||||||
- Legacy data exported using `DATA_EXPORTER` does NOT automatically create `A_SOURCE_FILE_RECEIVED` records
|
- Use `DATA_EXPORTER` with **`pRegisterExport => TRUE`** parameter to automatically register exported files in `A_SOURCE_FILE_RECEIVED`
|
||||||
- Without these records, FILE_ARCHIVER **CANNOT** archive the data
|
- This enables FILE_ARCHIVER to process legacy data exports without manual registration
|
||||||
- See [System Migration Guide](System_Migration_Informatica_to_Airflow_DBT.md) for workaround strategies
|
- Available in both `EXPORT_TABLE_DATA` (single CSV) and `EXPORT_TABLE_DATA_TO_CSV_BY_DATE` (partitioned CSV exports)
|
||||||
|
|
||||||
**Recommendation for legacy data**: Export directly to ARCHIVE bucket using `DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE` with `pBucketArea => 'ARCHIVE'` to bypass this requirement
|
**Example - Legacy Data Export with Registration**:
|
||||||
|
```sql
|
||||||
|
-- Export legacy data to DATA bucket WITH automatic registration
|
||||||
|
BEGIN
|
||||||
|
CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
|
||||||
|
pSchemaName => 'OU_TOP',
|
||||||
|
pTableName => 'AGGREGATED_ALLOTMENT',
|
||||||
|
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
||||||
|
pBucketArea => 'DATA',
|
||||||
|
pFolderName => 'legacy_export',
|
||||||
|
pMinDate => DATE '2024-01-01',
|
||||||
|
pMaxDate => DATE '2024-12-31',
|
||||||
|
pRegisterExport => TRUE, -- ✓ Registers files in A_SOURCE_FILE_RECEIVED
|
||||||
|
pProcessName => 'LEGACY_MIGRATION'
|
||||||
|
);
|
||||||
|
END;
|
||||||
|
/
|
||||||
|
|
||||||
|
-- Now FILE_ARCHIVER can process these files
|
||||||
|
BEGIN
|
||||||
|
CT_MRDS.FILE_ARCHIVER.ARCHIVE_TABLE_DATA(
|
||||||
|
pSourceFileConfigKey => vConfigKey
|
||||||
|
);
|
||||||
|
END;
|
||||||
|
/
|
||||||
|
```
|
||||||
|
|
||||||
|
**Alternative approach**: Export directly to ARCHIVE bucket using `DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE` with `pBucketArea => 'ARCHIVE'` to bypass archival step entirely
|
||||||
|
|
||||||
## Archival Strategies
|
## Archival Strategies
|
||||||
|
|
||||||
@@ -38,7 +67,7 @@ The FILE_ARCHIVER package provides flexible archival strategies that accommodate
|
|||||||
|
|
||||||
| Strategy | WHERE Clause Logic | Configuration Required | Primary Use Case |
|
| Strategy | WHERE Clause Logic | Configuration Required | Primary Use Case |
|
||||||
|----------|-------------------|----------------------|------------------|
|
|----------|-------------------|----------------------|------------------|
|
||||||
| `THRESHOLD_BASED` | Days since workflow start > threshold | DAYS_FOR_ARCHIVE_THRESHOLD | Legacy compatibility, simple time-based archival |
|
| `THRESHOLD_BASED` | Days since workflow start > threshold | DAYS_FOR_ARCHIVE_THRESHOLD | Simple time-based archival |
|
||||||
| `MINIMUM_AGE_MONTHS` | Archive data older than X months (0=current month only) | MINIMUM_AGE_MONTHS (≥0) | All sources - flexible retention (0 for LM, 6 for CSDB) |
|
| `MINIMUM_AGE_MONTHS` | Archive data older than X months (0=current month only) | MINIMUM_AGE_MONTHS (≥0) | All sources - flexible retention (0 for LM, 6 for CSDB) |
|
||||||
| `HYBRID` | Combines month boundary + minimum age | MINIMUM_AGE_MONTHS | Advanced retention scenarios |
|
| `HYBRID` | Combines month boundary + minimum age | MINIMUM_AGE_MONTHS | Advanced retention scenarios |
|
||||||
|
|
||||||
@@ -62,11 +91,11 @@ WHERE SOURCE_FILE_TYPE = 'INPUT'
|
|||||||
AND TABLE_ID = 'C2D_TABLE';
|
AND TABLE_ID = 'C2D_TABLE';
|
||||||
```
|
```
|
||||||
|
|
||||||
**Use Case**: Simple time-based archival, backward compatible with FILE_ARCHIVER v2.0.0 behavior.
|
**Use Case**: Simple time-based archival.
|
||||||
|
|
||||||
### 2. MINIMUM_AGE_MONTHS
|
### 2. MINIMUM_AGE_MONTHS
|
||||||
|
|
||||||
Archives data older than specified number of months. **Special case**: MINIMUM_AGE_MONTHS = 0 archives all data before current month (replaces deprecated CURRENT_MONTH_ONLY strategy).
|
Archives data older than specified number of months. **Special case**: MINIMUM_AGE_MONTHS = 0 archives all data before current month.
|
||||||
|
|
||||||
**WHERE Clause**:
|
**WHERE Clause**:
|
||||||
```sql
|
```sql
|
||||||
@@ -132,6 +161,60 @@ WHERE SOURCE_FILE_TYPE = 'INPUT'
|
|||||||
|
|
||||||
**Use Case**: Advanced scenarios requiring both current month retention AND minimum age threshold.
|
**Use Case**: Advanced scenarios requiring both current month retention AND minimum age threshold.
|
||||||
|
|
||||||
|
## Archival Triggering Logic
|
||||||
|
|
||||||
|
### Strategy-Specific Execution Behavior
|
||||||
|
|
||||||
|
The FILE_ARCHIVER package uses **different triggering logic** depending on the configured archival strategy:
|
||||||
|
|
||||||
|
#### MINIMUM_AGE_MONTHS Strategy (Threshold-Independent)
|
||||||
|
|
||||||
|
**Behavior**: Archives data **immediately** when age criteria is met, **without checking** archival thresholds.
|
||||||
|
|
||||||
|
```sql
|
||||||
|
-- Executed when MINIMUM_AGE_MONTHS strategy is configured
|
||||||
|
IF vSourceFileConfig.ARCHIVAL_STRATEGY = 'MINIMUM_AGE_MONTHS' THEN
|
||||||
|
vArchivalTriggeredBy := 'AGE_BASED';
|
||||||
|
-- Proceeds with archival regardless of FILES_COUNT, ROWS_COUNT, or BYTES_SUM
|
||||||
|
END IF;
|
||||||
|
```
|
||||||
|
|
||||||
|
**Why**: This strategy is designed for **strict retention policies** where data **must** be archived based on age alone (e.g., regulatory compliance requiring current month only).
|
||||||
|
|
||||||
|
#### THRESHOLD_BASED and HYBRID Strategies (Threshold-Dependent)
|
||||||
|
|
||||||
|
**Behavior**: Archives data **only when** at least one of the following thresholds is exceeded:
|
||||||
|
|
||||||
|
1. **FILES_COUNT_OVER_ARCHIVE_THRESHOLD** - Number of files eligible for archival
|
||||||
|
2. **ROWS_COUNT_OVER_ARCHIVE_THRESHOLD** - Number of rows eligible for archival
|
||||||
|
3. **BYTES_SUM_OVER_ARCHIVE_THRESHOLD** - Total size in bytes eligible for archival
|
||||||
|
|
||||||
|
```sql
|
||||||
|
-- Executed for THRESHOLD_BASED and HYBRID strategies
|
||||||
|
IF vTableStat.OVER_ARCH_THRESOLD_FILE_COUNT >= vSourceFileConfig.FILES_COUNT_OVER_ARCHIVE_THRESHOLD THEN
|
||||||
|
vArchivalTriggeredBy := 'FILES_COUNT';
|
||||||
|
ELSIF vTableStat.OVER_ARCH_THRESOLD_ROW_COUNT >= vSourceFileConfig.ROWS_COUNT_OVER_ARCHIVE_THRESHOLD THEN
|
||||||
|
vArchivalTriggeredBy := 'ROWS_COUNT';
|
||||||
|
ELSIF vTableStat.OVER_ARCH_THRESOLD_SIZE >= vSourceFileConfig.BYTES_SUM_OVER_ARCHIVE_THRESHOLD THEN
|
||||||
|
vArchivalTriggeredBy := 'BYTES_SUM';
|
||||||
|
END IF;
|
||||||
|
```
|
||||||
|
|
||||||
|
**Why**: These strategies provide **performance optimization** by avoiding unnecessary archival operations when data volume is small.
|
||||||
|
|
||||||
|
**Configuration Example**:
|
||||||
|
```sql
|
||||||
|
-- Set archival thresholds for THRESHOLD_BASED strategy
|
||||||
|
UPDATE CT_MRDS.A_SOURCE_FILE_CONFIG
|
||||||
|
SET FILES_COUNT_OVER_ARCHIVE_THRESHOLD = 10, -- Archive when 10+ files eligible
|
||||||
|
ROWS_COUNT_OVER_ARCHIVE_THRESHOLD = 100000, -- Archive when 100k+ rows eligible
|
||||||
|
BYTES_SUM_OVER_ARCHIVE_THRESHOLD = 104857600 -- Archive when 100MB+ eligible
|
||||||
|
WHERE ARCHIVAL_STRATEGY = 'THRESHOLD_BASED'
|
||||||
|
AND TABLE_ID = 'YOUR_TABLE';
|
||||||
|
```
|
||||||
|
|
||||||
|
**Important**: For **MINIMUM_AGE_MONTHS** strategy, these threshold values are **ignored** - archival proceeds based on age alone.
|
||||||
|
|
||||||
## Configuration Validation
|
## Configuration Validation
|
||||||
|
|
||||||
### Validation Trigger
|
### Validation Trigger
|
||||||
@@ -158,8 +241,132 @@ WHERE ...;
|
|||||||
-- Error: ORA-20001: Strategy MINIMUM_AGE_MONTHS requires MINIMUM_AGE_MONTHS to be set
|
-- Error: ORA-20001: Strategy MINIMUM_AGE_MONTHS requires MINIMUM_AGE_MONTHS to be set
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## Archival Control Configuration
|
||||||
|
|
||||||
|
### ARCHIVE_ENABLED Column
|
||||||
|
|
||||||
|
Controls whether archival is enabled for specific table configuration.
|
||||||
|
|
||||||
|
**Column**: `A_SOURCE_FILE_CONFIG.ARCHIVE_ENABLED` (VARCHAR2(1), DEFAULT 'Y')
|
||||||
|
|
||||||
|
**Values**:
|
||||||
|
- `'Y'` (default) - Table is eligible for archival processing
|
||||||
|
- `'N'` - Table is excluded from archival (batch operations skip this config)
|
||||||
|
|
||||||
|
**Use Cases**:
|
||||||
|
- Disable archival for specific tables without removing configuration
|
||||||
|
- Temporarily suspend archival during data migration or troubleshooting
|
||||||
|
- Selective archival in batch operations
|
||||||
|
|
||||||
|
**Configuration Example**:
|
||||||
|
```sql
|
||||||
|
-- Disable archival for specific table
|
||||||
|
UPDATE CT_MRDS.A_SOURCE_FILE_CONFIG
|
||||||
|
SET ARCHIVE_ENABLED = 'N'
|
||||||
|
WHERE SOURCE_FILE_TYPE = 'INPUT'
|
||||||
|
AND SOURCE_FILE_ID = 'CSDB'
|
||||||
|
AND TABLE_ID = 'CSDB_DEBT';
|
||||||
|
COMMIT;
|
||||||
|
|
||||||
|
-- Re-enable archival
|
||||||
|
UPDATE CT_MRDS.A_SOURCE_FILE_CONFIG
|
||||||
|
SET ARCHIVE_ENABLED = 'Y'
|
||||||
|
WHERE SOURCE_FILE_TYPE = 'INPUT'
|
||||||
|
AND SOURCE_FILE_ID = 'CSDB'
|
||||||
|
AND TABLE_ID = 'CSDB_DEBT';
|
||||||
|
COMMIT;
|
||||||
|
|
||||||
|
-- Check archival status
|
||||||
|
SELECT
|
||||||
|
SOURCE_FILE_ID,
|
||||||
|
TABLE_ID,
|
||||||
|
ARCHIVE_ENABLED,
|
||||||
|
ARCHIVAL_STRATEGY
|
||||||
|
FROM CT_MRDS.A_SOURCE_FILE_CONFIG
|
||||||
|
WHERE SOURCE_FILE_TYPE = 'INPUT'
|
||||||
|
ORDER BY SOURCE_FILE_ID, TABLE_ID;
|
||||||
|
```
|
||||||
|
|
||||||
|
### KEEP_IN_TRASH Column
|
||||||
|
|
||||||
|
Controls TRASH folder retention policy for archived files.
|
||||||
|
|
||||||
|
**Column**: `A_SOURCE_FILE_CONFIG.KEEP_IN_TRASH` (VARCHAR2(1), DEFAULT 'Y')
|
||||||
|
|
||||||
|
**Values**:
|
||||||
|
- `'Y'` (default) - CSV files kept in TRASH folder after archival (status: ARCHIVED_AND_TRASHED)
|
||||||
|
- `'N'` - CSV files deleted from TRASH folder after archival (status: ARCHIVED_AND_PURGED)
|
||||||
|
|
||||||
|
**Benefits of TRASH Retention (KEEP_IN_TRASH = 'Y')**:
|
||||||
|
- Safety net for rollback if archival issues discovered
|
||||||
|
- Supports compliance and audit requirements
|
||||||
|
- Enables file restoration via `RESTORE_FILE_FROM_TRASH` procedure
|
||||||
|
|
||||||
|
**Benefits of TRASH Cleanup (KEEP_IN_TRASH = 'N')**:
|
||||||
|
- Reduces storage costs in DATA bucket
|
||||||
|
- Simplifies bucket management
|
||||||
|
- Appropriate for non-critical or test data
|
||||||
|
|
||||||
|
**Configuration Example**:
|
||||||
|
```sql
|
||||||
|
-- Production: Keep files in TRASH (recommended)
|
||||||
|
UPDATE CT_MRDS.A_SOURCE_FILE_CONFIG
|
||||||
|
SET KEEP_IN_TRASH = 'Y'
|
||||||
|
WHERE SOURCE_FILE_TYPE = 'INPUT'
|
||||||
|
AND SOURCE_FILE_ID = 'LM'
|
||||||
|
AND TABLE_ID LIKE 'LM_%';
|
||||||
|
COMMIT;
|
||||||
|
|
||||||
|
-- Test environment: Cleanup TRASH to save storage
|
||||||
|
UPDATE CT_MRDS.A_SOURCE_FILE_CONFIG
|
||||||
|
SET KEEP_IN_TRASH = 'N'
|
||||||
|
WHERE SOURCE_FILE_TYPE = 'INPUT'
|
||||||
|
AND SOURCE_FILE_ID = 'TEST_SOURCE';
|
||||||
|
COMMIT;
|
||||||
|
|
||||||
|
-- Bulk configuration by source
|
||||||
|
UPDATE CT_MRDS.A_SOURCE_FILE_CONFIG
|
||||||
|
SET KEEP_IN_TRASH = 'Y'
|
||||||
|
WHERE SOURCE_FILE_TYPE = 'INPUT'
|
||||||
|
AND SOURCE_FILE_ID IN ('CSDB', 'C2D', 'LM');
|
||||||
|
COMMIT;
|
||||||
|
```
|
||||||
|
|
||||||
## Data Lifecycle Workflow
|
## Data Lifecycle Workflow
|
||||||
|
|
||||||
|
### Status Tracking in A_SOURCE_FILE_RECEIVED
|
||||||
|
|
||||||
|
The FILE_ARCHIVER tracks file lifecycle through the `PROCESSING_STATUS` column in `CT_MRDS.A_SOURCE_FILE_RECEIVED` table:
|
||||||
|
|
||||||
|
**Status Progression**:
|
||||||
|
```
|
||||||
|
INGESTED → ARCHIVED_AND_TRASHED → ARCHIVED_AND_PURGED (optional)
|
||||||
|
↓
|
||||||
|
INGESTED (via RESTORE_FILE_FROM_TRASH)
|
||||||
|
```
|
||||||
|
|
||||||
|
**Status Descriptions**:
|
||||||
|
- **INGESTED**: File successfully processed through Airflow+DBT, residing in ODS bucket
|
||||||
|
- **ARCHIVED_AND_TRASHED**: File archived to Parquet in ARCHIVE bucket, CSV retained in TRASH folder (DATA bucket)
|
||||||
|
- **ARCHIVED_AND_PURGED**: File archived to Parquet, CSV deleted from TRASH folder (when KEEP_IN_TRASH='N')
|
||||||
|
|
||||||
|
**Associated Columns Updated During Archival**:
|
||||||
|
```sql
|
||||||
|
UPDATE CT_MRDS.A_SOURCE_FILE_RECEIVED
|
||||||
|
SET PROCESSING_STATUS = 'ARCHIVED_AND_TRASHED', -- Status change
|
||||||
|
ARCH_PATH = 'archive_directory_prefix/', -- Directory with Parquet files
|
||||||
|
PARTITION_YEAR = 2026, -- Year partition value
|
||||||
|
PARTITION_MONTH = 02 -- Month partition value
|
||||||
|
WHERE SOURCE_FILE_NAME = 'file.csv';
|
||||||
|
```
|
||||||
|
|
||||||
|
**ARCH_PATH Column**: Contains the **directory prefix** (URI) where archived Parquet files are located in the ARCHIVE bucket. Since `DBMS_CLOUD.EXPORT_DATA` may create multiple Parquet files with parallel execution, the system stores the directory location rather than individual filenames.
|
||||||
|
|
||||||
|
**Example ARCH_PATH**:
|
||||||
|
```
|
||||||
|
https://objectstorage.eu-frankfurt-1.oraclecloud.com/n/namespace/b/archive/o/ARCHIVE/LM/STANDING_FACILITIES/PARTITION_YEAR=2026/PARTITION_MONTH=02/
|
||||||
|
```
|
||||||
|
|
||||||
### Standard File Processing Flow
|
### Standard File Processing Flow
|
||||||
|
|
||||||
```
|
```
|
||||||
@@ -183,9 +390,9 @@ WHERE ...;
|
|||||||
2.1 TRASH Subfolder (DATA Bucket - File Retention)
|
2.1 TRASH Subfolder (DATA Bucket - File Retention)
|
||||||
├─ Located in DATA bucket (e.g., TRASH/LM/TABLE_NAME)
|
├─ Located in DATA bucket (e.g., TRASH/LM/TABLE_NAME)
|
||||||
├─ Stores CSV files after archival to Parquet
|
├─ Stores CSV files after archival to Parquet
|
||||||
├─ Status: ARCHIVED_AND_TRASHED (default retention)
|
├─ Status: ARCHIVED_AND_TRASHED (default, controlled by KEEP_IN_TRASH config)
|
||||||
├─ Enables rollback if archival issues occur
|
├─ Enables rollback if archival issues occur
|
||||||
└─ Optional cleanup: ARCHIVED_AND_PURGED (pKeepInTrash=FALSE)
|
└─ Optional cleanup: ARCHIVED_AND_PURGED (when KEEP_IN_TRASH = 'N')
|
||||||
|
|
||||||
3. ARCHIVE Bucket (Long-term Storage)
|
3. ARCHIVE Bucket (Long-term Storage)
|
||||||
├─ Historical data in Parquet format
|
├─ Historical data in Parquet format
|
||||||
@@ -194,29 +401,48 @@ WHERE ...;
|
|||||||
└─ Optimized for big data analytics (Spark, Hive)
|
└─ Optimized for big data analytics (Spark, Hive)
|
||||||
|
|
||||||
**Key Procedures**:
|
**Key Procedures**:
|
||||||
- `ARCHIVE_TABLE_DATA(pSourceFileConfigKey, pKeepInTrash)` - Main archival procedure using strategy-specific WHERE clause
|
- `ARCHIVE_TABLE_DATA(pSourceFileConfigKey)` - Main archival procedure using strategy-specific WHERE clause
|
||||||
- `pKeepInTrash` (BOOLEAN, DEFAULT TRUE) - Controls TRASH folder retention
|
- TRASH folder retention controlled by `KEEP_IN_TRASH` column in A_SOURCE_FILE_CONFIG
|
||||||
- TRUE: Files kept in TRASH folder for safety and rollback capability (default)
|
- `ARCHIVE_ALL(pSourceFileConfigKey, pSourceKey, pArchiveAll)` - Batch archival with 3-level granularity and error handling
|
||||||
- FALSE: Files deleted from TRASH folder after successful archival
|
- **Level 3 (Highest Priority)**: Single configuration via `pSourceFileConfigKey`
|
||||||
|
- **Level 2 (Medium Priority)**: All configurations for source via `pSourceKey`
|
||||||
|
- **Level 1 (Lowest Priority)**: All configurations system-wide via `pArchiveAll`
|
||||||
|
- **Error Handling**: Continues processing other tables on individual failures
|
||||||
|
- **Filtering**: Respects `ARCHIVE_ENABLED='Y'` (skips disabled configurations)
|
||||||
|
- **Individual TRASH Policy**: Each table's `KEEP_IN_TRASH` setting applied independently
|
||||||
|
- **Summary Reporting**: Returns counts of Archived/Skipped/Failed tables
|
||||||
- `GET_ARCHIVAL_WHERE_CLAUSE` - Returns WHERE clause based on configured strategy
|
- `GET_ARCHIVAL_WHERE_CLAUSE` - Returns WHERE clause based on configured strategy
|
||||||
- `GATHER_TABLE_STAT` - Calculates archival statistics using strategy logic
|
- `GATHER_TABLE_STAT` - Calculates archival statistics using strategy logic
|
||||||
|
- `GATHER_TABLE_STAT_ALL(pSourceFileConfigKey, pSourceKey, pGatherAll)` - Batch statistics with 3-level granularity
|
||||||
|
- `RESTORE_FILE_FROM_TRASH(pSourceFileConfigKey, pSourceKey, pRestoreAll)` - Restore archived files from TRASH
|
||||||
|
- `PURGE_TRASH_FOLDER(pSourceFileConfigKey, pSourceKey, pPurgeAll)` - Purge TRASH folder with 3-level granularity
|
||||||
|
|
||||||
**Archival Execution**:
|
**Archival Execution**:
|
||||||
```sql
|
```sql
|
||||||
-- Default behavior: Keep files in TRASH folder (ARCHIVED_AND_TRASHED status)
|
-- Single table archival (TRASH retention controlled by KEEP_IN_TRASH config)
|
||||||
BEGIN
|
BEGIN
|
||||||
CT_MRDS.FILE_ARCHIVER.ARCHIVE_TABLE_DATA(
|
CT_MRDS.FILE_ARCHIVER.ARCHIVE_TABLE_DATA(
|
||||||
pSourceFileConfigKey => vSourceFileConfigKey,
|
pSourceFileConfigKey => vSourceFileConfigKey
|
||||||
pKeepInTrash => TRUE -- DEFAULT value
|
|
||||||
);
|
);
|
||||||
END;
|
END;
|
||||||
/
|
/
|
||||||
|
|
||||||
-- Optional: Delete files from TRASH after archival (ARCHIVED_AND_PURGED status)
|
-- Batch archival: All tables for specific source
|
||||||
BEGIN
|
BEGIN
|
||||||
CT_MRDS.FILE_ARCHIVER.ARCHIVE_TABLE_DATA(
|
CT_MRDS.FILE_ARCHIVER.ARCHIVE_ALL(
|
||||||
pSourceFileConfigKey => vSourceFileConfigKey,
|
pSourceFileConfigKey => NULL,
|
||||||
pKeepInTrash => FALSE -- Cleanup TRASH folder
|
pSourceKey => 'LM', -- Archive all LM tables
|
||||||
|
pArchiveAll => FALSE
|
||||||
|
);
|
||||||
|
END;
|
||||||
|
/
|
||||||
|
|
||||||
|
-- Batch archival: All tables system-wide
|
||||||
|
BEGIN
|
||||||
|
CT_MRDS.FILE_ARCHIVER.ARCHIVE_ALL(
|
||||||
|
pSourceFileConfigKey => NULL,
|
||||||
|
pSourceKey => NULL,
|
||||||
|
pArchiveAll => TRUE -- Archive all configured tables
|
||||||
);
|
);
|
||||||
END;
|
END;
|
||||||
/
|
/
|
||||||
@@ -225,10 +451,121 @@ END;
|
|||||||
**Strategy-Based Filtering**:
|
**Strategy-Based Filtering**:
|
||||||
- Package retrieves ARCHIVAL_STRATEGY from A_SOURCE_FILE_CONFIG
|
- Package retrieves ARCHIVAL_STRATEGY from A_SOURCE_FILE_CONFIG
|
||||||
- GET_ARCHIVAL_WHERE_CLAUSE generates appropriate WHERE clause
|
- GET_ARCHIVAL_WHERE_CLAUSE generates appropriate WHERE clause
|
||||||
|
- Only tables with ARCHIVE_ENABLED = 'Y' are processed
|
||||||
- Data matching criteria moved from ODS to ARCHIVE bucket
|
- Data matching criteria moved from ODS to ARCHIVE bucket
|
||||||
- CSV files moved to TRASH subfolder in DATA bucket (ODS/ → TRASH/)
|
- CSV files moved to TRASH subfolder in DATA bucket (ODS/ → TRASH/)
|
||||||
- Parquet format with Hive-style partitioning applied to ARCHIVE bucket
|
- Parquet format with Hive-style partitioning applied to ARCHIVE bucket
|
||||||
- TRASH retention controlled by pKeepInTrash parameter
|
- TRASH retention controlled by KEEP_IN_TRASH column in A_SOURCE_FILE_CONFIG
|
||||||
|
|
||||||
|
### Automatic Rollback Mechanism
|
||||||
|
|
||||||
|
FILE_ARCHIVER implements **automatic rollback** to ensure data integrity if archival process fails:
|
||||||
|
|
||||||
|
**Process Flow**:
|
||||||
|
1. **Export to ARCHIVE**: Data exported to Parquet format in ARCHIVE bucket
|
||||||
|
2. **Status Update**: A_SOURCE_FILE_RECEIVED records updated to 'ARCHIVED_AND_TRASHED'
|
||||||
|
3. **Move to TRASH**: CSV files moved from ODS to TRASH folder (DATA bucket)
|
||||||
|
4. **Optional Cleanup**: If KEEP_IN_TRASH='N', files deleted from TRASH
|
||||||
|
|
||||||
|
**Automatic Rollback Trigger**:
|
||||||
|
If **any error occurs** during step 3 (Move to TRASH), the system:
|
||||||
|
- **Reverts all files**: Moves successfully processed files from TRASH back to ODS
|
||||||
|
- **Rolls back status**: Resets A_SOURCE_FILE_RECEIVED status to 'INGESTED'
|
||||||
|
- **Logs error**: Records detailed error information in A_PROCESS_LOG
|
||||||
|
- **Raises exception**: Propagates error to calling process
|
||||||
|
|
||||||
|
**Rollback Logic (from code)**:
|
||||||
|
```sql
|
||||||
|
-- If MOVE_FILE_TO_TRASH fails for any file
|
||||||
|
ELSIF vProcessControlStatus = 'MOVE_FILE_TO_TRASH_FAILURE' THEN
|
||||||
|
FOR f in (files already moved to TRASH) LOOP
|
||||||
|
-- Move file back from TRASH to ODS
|
||||||
|
DBMS_CLOUD.MOVE_OBJECT(
|
||||||
|
source_object_uri => 'TRASH/.../filename',
|
||||||
|
target_object_uri => 'ODS/.../filename'
|
||||||
|
);
|
||||||
|
|
||||||
|
-- Revert status back to INGESTED
|
||||||
|
UPDATE A_SOURCE_FILE_RECEIVED
|
||||||
|
SET PROCESSING_STATUS = 'INGESTED'
|
||||||
|
WHERE source_file_name = f.filename;
|
||||||
|
END LOOP;
|
||||||
|
END IF;
|
||||||
|
```
|
||||||
|
|
||||||
|
**Why This Matters**: Ensures **all-or-nothing** archival - either all files for a YEAR_MONTH partition are successfully archived, or **none** are (maintains data consistency).
|
||||||
|
|
||||||
|
### TRASH Management Procedures
|
||||||
|
|
||||||
|
#### RESTORE_FILE_FROM_TRASH
|
||||||
|
|
||||||
|
Restores files from TRASH folder back to ODS with **3-level granularity**:
|
||||||
|
|
||||||
|
**Level 3 (Highest Priority)** - Single File Restore:
|
||||||
|
```sql
|
||||||
|
-- Restore specific file by A_SOURCE_FILE_RECEIVED_KEY
|
||||||
|
CALL FILE_ARCHIVER.RESTORE_FILE_FROM_TRASH(
|
||||||
|
pSourceFileReceivedKey => 12345
|
||||||
|
);
|
||||||
|
```
|
||||||
|
|
||||||
|
**Level 2 (Medium Priority)** - Configuration-Based Restore:
|
||||||
|
```sql
|
||||||
|
-- Restore all files for specific table configuration
|
||||||
|
CALL FILE_ARCHIVER.RESTORE_FILE_FROM_TRASH(
|
||||||
|
pSourceFileConfigKey => 341
|
||||||
|
);
|
||||||
|
```
|
||||||
|
|
||||||
|
**Level 1 (Lowest Priority)** - Global Restore:
|
||||||
|
```sql
|
||||||
|
-- Restore ALL files with ARCHIVED_AND_TRASHED status system-wide
|
||||||
|
CALL FILE_ARCHIVER.RESTORE_FILE_FROM_TRASH(
|
||||||
|
pRestoreAll => TRUE
|
||||||
|
);
|
||||||
|
```
|
||||||
|
|
||||||
|
**Restore Operations**:
|
||||||
|
- **Moves files**: TRASH folder → ODS folder (using DBMS_CLOUD.MOVE_OBJECT)
|
||||||
|
- **Updates status**: ARCHIVED_AND_TRASHED → INGESTED
|
||||||
|
- **Clears metadata**: Sets ARCH_PATH, PARTITION_YEAR, PARTITION_MONTH to NULL
|
||||||
|
- **Returns files to active processing**: Makes data available for Airflow+DBT pipeline
|
||||||
|
|
||||||
|
#### PURGE_TRASH_FOLDER
|
||||||
|
|
||||||
|
Permanently deletes files from TRASH with **3-level granularity**:
|
||||||
|
|
||||||
|
**Level 3 (Highest Priority)** - Single File Purge:
|
||||||
|
```sql
|
||||||
|
-- Delete specific file from TRASH
|
||||||
|
CALL FILE_ARCHIVER.PURGE_TRASH_FOLDER(
|
||||||
|
pSourceFileReceivedKey => 12345
|
||||||
|
);
|
||||||
|
```
|
||||||
|
|
||||||
|
**Level 2 (Medium Priority)** - Configuration-Based Purge:
|
||||||
|
```sql
|
||||||
|
-- Delete all TRASH files for specific table configuration
|
||||||
|
CALL FILE_ARCHIVER.PURGE_TRASH_FOLDER(
|
||||||
|
pSourceFileConfigKey => 341
|
||||||
|
);
|
||||||
|
```
|
||||||
|
|
||||||
|
**Level 1 (Lowest Priority)** - Global Purge:
|
||||||
|
```sql
|
||||||
|
-- Delete ALL files with ARCHIVED_AND_TRASHED status system-wide
|
||||||
|
CALL FILE_ARCHIVER.PURGE_TRASH_FOLDER(
|
||||||
|
pPurgeAll => TRUE
|
||||||
|
);
|
||||||
|
```
|
||||||
|
|
||||||
|
**Purge Operations**:
|
||||||
|
- **Deletes files**: Permanently removes from TRASH folder (using DBMS_CLOUD.DELETE_OBJECT)
|
||||||
|
- **Updates status**: ARCHIVED_AND_TRASHED → ARCHIVED_AND_PURGED
|
||||||
|
- **Warning**: **Irreversible operation** - files cannot be restored after purge
|
||||||
|
- **Use case**: Storage optimization, compliance with data retention policies
|
||||||
|
|
||||||
|
**Important**: Purge is **not automatic** - must be explicitly called. This provides additional safety layer for data retention.
|
||||||
|
|
||||||
## Configuration Examples
|
## Configuration Examples
|
||||||
|
|
||||||
@@ -335,6 +672,56 @@ GROUP BY ARCHIVAL_STRATEGY
|
|||||||
ORDER BY ARCHIVAL_STRATEGY;
|
ORDER BY ARCHIVAL_STRATEGY;
|
||||||
```
|
```
|
||||||
|
|
||||||
|
### Example 5: Configure Archival Control Settings
|
||||||
|
|
||||||
|
```sql
|
||||||
|
-- Complete configuration with all archival settings
|
||||||
|
UPDATE CT_MRDS.A_SOURCE_FILE_CONFIG
|
||||||
|
SET ARCHIVAL_STRATEGY = 'MINIMUM_AGE_MONTHS',
|
||||||
|
MINIMUM_AGE_MONTHS = 6,
|
||||||
|
ARCHIVE_ENABLED = 'Y', -- Enable archival
|
||||||
|
KEEP_IN_TRASH = 'Y' -- Keep files in TRASH for safety
|
||||||
|
WHERE SOURCE_FILE_TYPE = 'INPUT'
|
||||||
|
AND SOURCE_FILE_ID = 'CSDB'
|
||||||
|
AND TABLE_ID = 'CSDB_DEBT';
|
||||||
|
COMMIT;
|
||||||
|
|
||||||
|
-- Disable archival temporarily for troubleshooting
|
||||||
|
UPDATE CT_MRDS.A_SOURCE_FILE_CONFIG
|
||||||
|
SET ARCHIVE_ENABLED = 'N' -- Batch operations will skip this table
|
||||||
|
WHERE TABLE_ID = 'CSDB_DEBT';
|
||||||
|
COMMIT;
|
||||||
|
|
||||||
|
-- Configure TRASH cleanup for test environment
|
||||||
|
UPDATE CT_MRDS.A_SOURCE_FILE_CONFIG
|
||||||
|
SET KEEP_IN_TRASH = 'N' -- Delete files from TRASH after archival
|
||||||
|
WHERE SOURCE_FILE_TYPE = 'INPUT'
|
||||||
|
AND SOURCE_FILE_ID = 'TEST_SOURCE';
|
||||||
|
COMMIT;
|
||||||
|
|
||||||
|
-- View complete configuration
|
||||||
|
SELECT
|
||||||
|
SOURCE_FILE_ID,
|
||||||
|
TABLE_ID,
|
||||||
|
ARCHIVAL_STRATEGY,
|
||||||
|
MINIMUM_AGE_MONTHS,
|
||||||
|
ARCHIVE_ENABLED,
|
||||||
|
KEEP_IN_TRASH
|
||||||
|
FROM CT_MRDS.A_SOURCE_FILE_CONFIG
|
||||||
|
WHERE SOURCE_FILE_TYPE = 'INPUT'
|
||||||
|
ORDER BY SOURCE_FILE_ID, TABLE_ID;
|
||||||
|
|
||||||
|
-- Summary by archival status
|
||||||
|
SELECT
|
||||||
|
ARCHIVE_ENABLED,
|
||||||
|
KEEP_IN_TRASH,
|
||||||
|
COUNT(*) AS TABLE_COUNT
|
||||||
|
FROM CT_MRDS.A_SOURCE_FILE_CONFIG
|
||||||
|
WHERE SOURCE_FILE_TYPE = 'INPUT'
|
||||||
|
GROUP BY ARCHIVE_ENABLED, KEEP_IN_TRASH
|
||||||
|
ORDER BY ARCHIVE_ENABLED DESC, KEEP_IN_TRASH DESC;
|
||||||
|
```
|
||||||
|
|
||||||
## Release 01 Configuration
|
## Release 01 Configuration
|
||||||
|
|
||||||
### Configured Tables (MARS-828)
|
### Configured Tables (MARS-828)
|
||||||
@@ -425,7 +812,180 @@ SET ARCHIVAL_STRATEGY = 'MINIMUM_AGE_MONTHS',
|
|||||||
WHERE ...;
|
WHERE ...;
|
||||||
```
|
```
|
||||||
|
|
||||||
#### Issue 2: Archival Not Working as Expected
|
#### Issue 2: Archival Not Triggering Despite Configuration
|
||||||
|
|
||||||
|
**Scenario A**: **MINIMUM_AGE_MONTHS** strategy not archiving
|
||||||
|
```sql
|
||||||
|
-- Check files that should be archived
|
||||||
|
SELECT
|
||||||
|
SFR.A_SOURCE_FILE_RECEIVED_KEY,
|
||||||
|
SFR.SOURCE_FILE_NAME,
|
||||||
|
SFR.PROCESSING_STATUS,
|
||||||
|
LH.LOAD_START,
|
||||||
|
TRUNC(MONTHS_BETWEEN(SYSDATE, LH.LOAD_START)) AS MONTHS_AGE,
|
||||||
|
SFC.MINIMUM_AGE_MONTHS AS THRESHOLD
|
||||||
|
FROM CT_MRDS.A_SOURCE_FILE_RECEIVED SFR
|
||||||
|
JOIN CT_ODS.A_LOAD_HISTORY LH ON SFR.A_WORKFLOW_HISTORY_KEY = LH.A_WORKFLOW_HISTORY_KEY
|
||||||
|
JOIN CT_MRDS.A_SOURCE_FILE_CONFIG SFC ON SFR.A_SOURCE_FILE_CONFIG_KEY = SFC.A_SOURCE_FILE_CONFIG_KEY
|
||||||
|
WHERE SFC.ARCHIVAL_STRATEGY = 'MINIMUM_AGE_MONTHS'
|
||||||
|
AND SFR.PROCESSING_STATUS = 'INGESTED'
|
||||||
|
AND SFC.ARCHIVE_ENABLED = 'Y'
|
||||||
|
ORDER BY LH.LOAD_START;
|
||||||
|
|
||||||
|
-- Note: MINIMUM_AGE_MONTHS archives immediately (threshold-independent)
|
||||||
|
-- If files not archived, check ARCHIVE_ENABLED='Y' and run ARCHIVE_TABLE_DATA
|
||||||
|
```
|
||||||
|
|
||||||
|
**Scenario B**: **THRESHOLD_BASED** or **HYBRID** strategy not archiving
|
||||||
|
```sql
|
||||||
|
-- Check if threshold reached for specific configuration
|
||||||
|
SELECT
|
||||||
|
SFC.SOURCE_FILE_ID,
|
||||||
|
SFC.TABLE_ID,
|
||||||
|
SFC.ARCHIVAL_STRATEGY,
|
||||||
|
SFC.FILES_COUNT_OVER_ARCHIVE_THRESHOLD AS FILE_THRESHOLD,
|
||||||
|
SFC.ROWS_COUNT_OVER_ARCHIVE_THRESHOLD AS ROW_THRESHOLD,
|
||||||
|
SFC.BYTES_SUM_OVER_ARCHIVE_THRESHOLD AS BYTE_THRESHOLD,
|
||||||
|
COUNT(SFR.A_SOURCE_FILE_RECEIVED_KEY) AS CURRENT_FILES,
|
||||||
|
SUM(SFR.TOTAL_RECORDS) AS CURRENT_ROWS,
|
||||||
|
SUM(SFR.FILE_SIZE_BYTES) AS CURRENT_BYTES
|
||||||
|
FROM CT_MRDS.A_SOURCE_FILE_CONFIG SFC
|
||||||
|
LEFT JOIN CT_MRDS.A_SOURCE_FILE_RECEIVED SFR
|
||||||
|
ON SFC.A_SOURCE_FILE_CONFIG_KEY = SFR.A_SOURCE_FILE_CONFIG_KEY
|
||||||
|
AND SFR.PROCESSING_STATUS = 'INGESTED'
|
||||||
|
WHERE SFC.ARCHIVAL_STRATEGY IN ('THRESHOLD_BASED', 'HYBRID')
|
||||||
|
AND SFC.ARCHIVE_ENABLED = 'Y'
|
||||||
|
AND SFC.A_SOURCE_FILE_CONFIG_KEY = :yourConfigKey
|
||||||
|
GROUP BY
|
||||||
|
SFC.SOURCE_FILE_ID, SFC.TABLE_ID, SFC.ARCHIVAL_STRATEGY,
|
||||||
|
SFC.FILES_COUNT_OVER_ARCHIVE_THRESHOLD,
|
||||||
|
SFC.ROWS_COUNT_OVER_ARCHIVE_THRESHOLD,
|
||||||
|
SFC.BYTES_SUM_OVER_ARCHIVE_THRESHOLD;
|
||||||
|
|
||||||
|
-- Expected: At least ONE threshold (FILE/ROW/BYTE) must be exceeded
|
||||||
|
-- If no threshold exceeded, archival will NOT trigger (threshold-dependent behavior)
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Issue 3: ARCH_PATH Contains Directory Not Filename
|
||||||
|
|
||||||
|
**Symptoms**: A_SOURCE_FILE_RECEIVED.ARCH_PATH shows folder path instead of specific file
|
||||||
|
|
||||||
|
**Explanation**: This is **expected behavior**:
|
||||||
|
```sql
|
||||||
|
-- Example ARCH_PATH value
|
||||||
|
SELECT ARCH_PATH
|
||||||
|
FROM CT_MRDS.A_SOURCE_FILE_RECEIVED
|
||||||
|
WHERE PROCESSING_STATUS = 'ARCHIVED_AND_TRASHED'
|
||||||
|
AND ROWNUM = 1;
|
||||||
|
|
||||||
|
-- Result (example):
|
||||||
|
-- https://objectstorage.../ARCHIVE/LM/STANDING_FACILITIES/PARTITION_YEAR=2026/PARTITION_MONTH=02/
|
||||||
|
|
||||||
|
-- Reason: DBMS_CLOUD.EXPORT_DATA with parallel execution creates multiple Parquet files:
|
||||||
|
-- - STANDING_FACILITIES_part_00001.parquet
|
||||||
|
-- - STANDING_FACILITIES_part_00002.parquet
|
||||||
|
-- - ...
|
||||||
|
-- System stores directory prefix to track ALL generated files
|
||||||
|
```
|
||||||
|
|
||||||
|
**To List Actual Parquet Files**:
|
||||||
|
```sql
|
||||||
|
-- Use DBMS_CLOUD.LIST_OBJECTS with ARCH_PATH as prefix
|
||||||
|
SELECT object_name, bytes, created
|
||||||
|
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
||||||
|
credential_name => 'OCI$RESOURCE_PRINCIPAL',
|
||||||
|
location_uri => 'https://objectstorage.../b/archive/o/'
|
||||||
|
))
|
||||||
|
WHERE object_name LIKE 'ARCHIVE/LM/STANDING_FACILITIES/PARTITION_YEAR=2026/PARTITION_MONTH=02/%';
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Issue 4: Files Remain in TRASH Folder
|
||||||
|
|
||||||
|
**Symptoms**: Files not deleted from TRASH after archival
|
||||||
|
|
||||||
|
**Cause**: Configuration has `KEEP_IN_TRASH='Y'` (retain files in TRASH)
|
||||||
|
|
||||||
|
**Verification**:
|
||||||
|
```sql
|
||||||
|
-- Check TRASH policy for configuration
|
||||||
|
SELECT
|
||||||
|
SOURCE_FILE_ID,
|
||||||
|
TABLE_ID,
|
||||||
|
KEEP_IN_TRASH,
|
||||||
|
CASE KEEP_IN_TRASH
|
||||||
|
WHEN 'Y' THEN 'Files RETAINED in TRASH (manual purge required)'
|
||||||
|
WHEN 'N' THEN 'Files DELETED immediately after archival'
|
||||||
|
END AS TRASH_BEHAVIOR
|
||||||
|
FROM CT_MRDS.A_SOURCE_FILE_CONFIG
|
||||||
|
WHERE TABLE_ID = 'YOUR_TABLE';
|
||||||
|
```
|
||||||
|
|
||||||
|
**Solutions**:
|
||||||
|
```sql
|
||||||
|
-- Option A: Change configuration to auto-delete (permanent change)
|
||||||
|
UPDATE CT_MRDS.A_SOURCE_FILE_CONFIG
|
||||||
|
SET KEEP_IN_TRASH = 'N' -- Auto-delete from TRASH after archival
|
||||||
|
WHERE TABLE_ID = 'YOUR_TABLE';
|
||||||
|
COMMIT;
|
||||||
|
|
||||||
|
-- Option B: Manually purge TRASH for specific table (one-time action)
|
||||||
|
BEGIN
|
||||||
|
CT_MRDS.FILE_ARCHIVER.PURGE_TRASH_FOLDER(
|
||||||
|
pSourceFileConfigKey => :yourConfigKey
|
||||||
|
);
|
||||||
|
END;
|
||||||
|
/
|
||||||
|
|
||||||
|
-- Option C: Purge all TRASH system-wide (use with caution)
|
||||||
|
BEGIN
|
||||||
|
CT_MRDS.FILE_ARCHIVER.PURGE_TRASH_FOLDER(
|
||||||
|
pPurgeAll => TRUE
|
||||||
|
);
|
||||||
|
END;
|
||||||
|
/
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Issue 5: Automatic Rollback Occurred
|
||||||
|
|
||||||
|
**Symptoms**: Files unexpectedly back in INGESTED status, archival process reported failure
|
||||||
|
|
||||||
|
**Cause**: Error during "Move to TRASH" step triggered automatic rollback
|
||||||
|
|
||||||
|
**Investigation**:
|
||||||
|
```sql
|
||||||
|
-- Check process logs for rollback events
|
||||||
|
SELECT
|
||||||
|
PROCESS_LOG_KEY,
|
||||||
|
LOG_LEVEL,
|
||||||
|
LOG_MESSAGE,
|
||||||
|
PARAMETERS,
|
||||||
|
LOG_TIMESTAMP
|
||||||
|
FROM CT_MRDS.A_PROCESS_LOG
|
||||||
|
WHERE PROCESS_NAME = 'ARCHIVE_TABLE_DATA'
|
||||||
|
AND LOG_MESSAGE LIKE '%rollback%' OR LOG_MESSAGE LIKE '%MOVE_FILE_TO_TRASH_FAILURE%'
|
||||||
|
ORDER BY LOG_TIMESTAMP DESC
|
||||||
|
FETCH FIRST 10 ROWS ONLY;
|
||||||
|
|
||||||
|
-- Check files that were rolled back
|
||||||
|
SELECT
|
||||||
|
A_SOURCE_FILE_RECEIVED_KEY,
|
||||||
|
SOURCE_FILE_NAME,
|
||||||
|
PROCESSING_STATUS, -- Should be INGESTED after rollback
|
||||||
|
ARCH_PATH, -- Should be NULL after rollback
|
||||||
|
PARTITION_YEAR, -- Should be NULL after rollback
|
||||||
|
PARTITION_MONTH -- Should be NULL after rollback
|
||||||
|
FROM CT_MRDS.A_SOURCE_FILE_RECEIVED
|
||||||
|
WHERE A_SOURCE_FILE_CONFIG_KEY = :yourConfigKey
|
||||||
|
AND UPDATED_AT > SYSDATE - 1 -- Last 24 hours
|
||||||
|
ORDER BY UPDATED_AT DESC;
|
||||||
|
```
|
||||||
|
|
||||||
|
**Resolution**:
|
||||||
|
1. **Investigate root cause**: Check error messages in A_PROCESS_LOG
|
||||||
|
2. **Fix underlying issue**: OCI permissions, bucket access, wrong credentials, etc.
|
||||||
|
3. **Re-run archival**: Call ARCHIVE_TABLE_DATA again after fix
|
||||||
|
|
||||||
|
#### Issue 6: Archival Not Working as Expected
|
||||||
|
|
||||||
**Symptoms**: Data not being archived according to strategy
|
**Symptoms**: Data not being archived according to strategy
|
||||||
|
|
||||||
@@ -495,9 +1055,156 @@ FROM user_objects
|
|||||||
WHERE object_name = 'FILE_ARCHIVER';
|
WHERE object_name = 'FILE_ARCHIVER';
|
||||||
```
|
```
|
||||||
|
|
||||||
|
### Diagnostic Queries for Monitoring
|
||||||
|
|
||||||
|
#### Query 1: Status Distribution Across All Files
|
||||||
|
|
||||||
|
```sql
|
||||||
|
-- Overall file status distribution
|
||||||
|
SELECT
|
||||||
|
PROCESSING_STATUS,
|
||||||
|
COUNT(*) AS FILE_COUNT,
|
||||||
|
ROUND(COUNT(*) * 100.0 / SUM(COUNT(*)) OVER (), 2) AS PERCENTAGE,
|
||||||
|
MIN(CREATED_AT) AS OLDEST_FILE,
|
||||||
|
MAX(CREATED_AT) AS NEWEST_FILE
|
||||||
|
FROM CT_MRDS.A_SOURCE_FILE_RECEIVED
|
||||||
|
GROUP BY PROCESSING_STATUS
|
||||||
|
ORDER BY FILE_COUNT DESC;
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Query 2: Files in TRASH (Archived but Not Purged)
|
||||||
|
|
||||||
|
```sql
|
||||||
|
-- Files currently in TRASH folder (status ARCHIVED_AND_TRASHED)
|
||||||
|
SELECT
|
||||||
|
SFR.A_SOURCE_FILE_RECEIVED_KEY,
|
||||||
|
SFC.SOURCE_FILE_ID,
|
||||||
|
SFC.TABLE_ID,
|
||||||
|
SFR.SOURCE_FILE_NAME,
|
||||||
|
SFR.ARCH_PATH,
|
||||||
|
SFR.PARTITION_YEAR,
|
||||||
|
SFR.PARTITION_MONTH,
|
||||||
|
SFR.FILE_SIZE_BYTES,
|
||||||
|
SFR.UPDATED_AT AS ARCHIVED_AT,
|
||||||
|
TRUNC(SYSDATE - SFR.UPDATED_AT) AS DAYS_IN_TRASH,
|
||||||
|
SFC.KEEP_IN_TRASH AS TRASH_POLICY
|
||||||
|
FROM CT_MRDS.A_SOURCE_FILE_RECEIVED SFR
|
||||||
|
JOIN CT_MRDS.A_SOURCE_FILE_CONFIG SFC ON SFR.A_SOURCE_FILE_CONFIG_KEY = SFC.A_SOURCE_FILE_CONFIG_KEY
|
||||||
|
WHERE SFR.PROCESSING_STATUS = 'ARCHIVED_AND_TRASHED'
|
||||||
|
ORDER BY SFR.UPDATED_AT DESC;
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Query 3: Archival Activity by Configuration
|
||||||
|
|
||||||
|
```sql
|
||||||
|
-- Archival statistics per table configuration
|
||||||
|
SELECT
|
||||||
|
SFC.SOURCE_FILE_ID,
|
||||||
|
SFC.TABLE_ID,
|
||||||
|
SFC.ARCHIVAL_STRATEGY,
|
||||||
|
SFC.ARCHIVE_ENABLED,
|
||||||
|
SFC.KEEP_IN_TRASH,
|
||||||
|
COUNT(CASE WHEN SFR.PROCESSING_STATUS = 'INGESTED' THEN 1 END) AS PENDING_ARCHIVE,
|
||||||
|
COUNT(CASE WHEN SFR.PROCESSING_STATUS = 'ARCHIVED_AND_TRASHED' THEN 1 END) AS IN_TRASH,
|
||||||
|
COUNT(CASE WHEN SFR.PROCESSING_STATUS = 'ARCHIVED_AND_PURGED' THEN 1 END) AS PURGED,
|
||||||
|
MAX(SFR.UPDATED_AT) FILTER (WHERE SFR.PROCESSING_STATUS LIKE 'ARCHIVED%') AS LAST_ARCHIVAL
|
||||||
|
FROM CT_MRDS.A_SOURCE_FILE_CONFIG SFC
|
||||||
|
LEFT JOIN CT_MRDS.A_SOURCE_FILE_RECEIVED SFR ON SFC.A_SOURCE_FILE_CONFIG_KEY = SFR.A_SOURCE_FILE_CONFIG_KEY
|
||||||
|
WHERE SFC.SOURCE_FILE_TYPE = 'INPUT'
|
||||||
|
GROUP BY
|
||||||
|
SFC.SOURCE_FILE_ID, SFC.TABLE_ID, SFC.ARCHIVAL_STRATEGY,
|
||||||
|
SFC.ARCHIVE_ENABLED, SFC.KEEP_IN_TRASH
|
||||||
|
ORDER BY SFC.SOURCE_FILE_ID, SFC.TABLE_ID;
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Query 4: Files Eligible for Archival (MINIMUM_AGE_MONTHS)
|
||||||
|
|
||||||
|
```sql
|
||||||
|
-- Files that should be archived based on MINIMUM_AGE_MONTHS strategy
|
||||||
|
SELECT
|
||||||
|
SFC.SOURCE_FILE_ID,
|
||||||
|
SFC.TABLE_ID,
|
||||||
|
SFC.MINIMUM_AGE_MONTHS AS AGE_THRESHOLD,
|
||||||
|
COUNT(*) AS ELIGIBLE_FILES,
|
||||||
|
SUM(SFR.FILE_SIZE_BYTES) AS TOTAL_SIZE_BYTES,
|
||||||
|
SUM(SFR.TOTAL_RECORDS) AS TOTAL_ROWS,
|
||||||
|
MIN(LH.LOAD_START) AS OLDEST_FILE,
|
||||||
|
MAX(LH.LOAD_START) AS NEWEST_ELIGIBLE
|
||||||
|
FROM CT_MRDS.A_SOURCE_FILE_CONFIG SFC
|
||||||
|
JOIN CT_MRDS.A_SOURCE_FILE_RECEIVED SFR ON SFC.A_SOURCE_FILE_CONFIG_KEY = SFR.A_SOURCE_FILE_CONFIG_KEY
|
||||||
|
JOIN CT_ODS.A_LOAD_HISTORY LH ON SFR.A_WORKFLOW_HISTORY_KEY = LH.A_WORKFLOW_HISTORY_KEY
|
||||||
|
WHERE SFC.ARCHIVAL_STRATEGY = 'MINIMUM_AGE_MONTHS'
|
||||||
|
AND SFC.ARCHIVE_ENABLED = 'Y'
|
||||||
|
AND SFR.PROCESSING_STATUS = 'INGESTED'
|
||||||
|
AND LH.LOAD_START < ADD_MONTHS(TRUNC(SYSDATE, 'MM'), -SFC.MINIMUM_AGE_MONTHS)
|
||||||
|
GROUP BY SFC.SOURCE_FILE_ID, SFC.TABLE_ID, SFC.MINIMUM_AGE_MONTHS
|
||||||
|
ORDER BY ELIGIBLE_FILES DESC;
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Query 5: Archival Performance Metrics
|
||||||
|
|
||||||
|
```sql
|
||||||
|
-- Recent archival operations with timing
|
||||||
|
SELECT
|
||||||
|
PROCESS_LOG_KEY,
|
||||||
|
SUBSTR(PARAMETERS, 1, 100) AS CONFIG_INFO,
|
||||||
|
LOG_TIMESTAMP AS START_TIME,
|
||||||
|
LEAD(LOG_TIMESTAMP) OVER (PARTITION BY SUBSTR(PARAMETERS, 1, 100) ORDER BY LOG_TIMESTAMP) AS END_TIME,
|
||||||
|
ROUND((LEAD(LOG_TIMESTAMP) OVER (PARTITION BY SUBSTR(PARAMETERS, 1, 100) ORDER BY LOG_TIMESTAMP)
|
||||||
|
- LOG_TIMESTAMP) * 24 * 60, 2) AS DURATION_MINUTES,
|
||||||
|
CASE
|
||||||
|
WHEN LOG_LEVEL = 'ERROR' THEN 'FAILED'
|
||||||
|
WHEN LOG_MESSAGE LIKE '%Archival completed%' THEN 'SUCCESS'
|
||||||
|
ELSE 'IN_PROGRESS'
|
||||||
|
END AS STATUS
|
||||||
|
FROM CT_MRDS.A_PROCESS_LOG
|
||||||
|
WHERE PROCESS_NAME = 'ARCHIVE_TABLE_DATA'
|
||||||
|
AND LOG_TIMESTAMP > SYSDATE - 7 -- Last 7 days
|
||||||
|
ORDER BY LOG_TIMESTAMP DESC;
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Query 6: TRASH Storage Usage
|
||||||
|
|
||||||
|
```sql
|
||||||
|
-- Estimate TRASH folder storage usage
|
||||||
|
SELECT
|
||||||
|
SFC.SOURCE_FILE_ID,
|
||||||
|
COUNT(*) AS FILES_IN_TRASH,
|
||||||
|
ROUND(SUM(SFR.FILE_SIZE_BYTES) / 1024 / 1024 / 1024, 2) AS SIZE_GB,
|
||||||
|
MIN(SFR.UPDATED_AT) AS OLDEST_IN_TRASH,
|
||||||
|
MAX(SFR.UPDATED_AT) AS NEWEST_IN_TRASH,
|
||||||
|
SFC.KEEP_IN_TRASH AS POLICY
|
||||||
|
FROM CT_MRDS.A_SOURCE_FILE_RECEIVED SFR
|
||||||
|
JOIN CT_MRDS.A_SOURCE_FILE_CONFIG SFC ON SFR.A_SOURCE_FILE_CONFIG_KEY = SFC.A_SOURCE_FILE_CONFIG_KEY
|
||||||
|
WHERE SFR.PROCESSING_STATUS = 'ARCHIVED_AND_TRASHED'
|
||||||
|
GROUP BY SFC.SOURCE_FILE_ID, SFC.KEEP_IN_TRASH
|
||||||
|
ORDER BY SIZE_GB DESC;
|
||||||
|
```
|
||||||
|
|
||||||
## Version History
|
## Version History
|
||||||
|
|
||||||
### v3.1.0 (Current - 2026-02-05)
|
### v3.3.0 (Current - 2026-02-11)
|
||||||
|
- **BREAKING CHANGE**: Removed `pKeepInTrash` parameter from ARCHIVE_TABLE_DATA
|
||||||
|
- Added `ARCHIVE_ENABLED` column to A_SOURCE_FILE_CONFIG for selective archiving control
|
||||||
|
- Added `KEEP_IN_TRASH` column to A_SOURCE_FILE_CONFIG (replaces pKeepInTrash parameter)
|
||||||
|
- Added batch procedures with 3-level granularity (config/source/all):
|
||||||
|
- ARCHIVE_ALL - Batch archival procedure
|
||||||
|
- GATHER_TABLE_STAT_ALL - Batch statistics procedure
|
||||||
|
- RESTORE_FILE_FROM_TRASH - Restore files from TRASH folder
|
||||||
|
- PURGE_TRASH_FOLDER - Purge TRASH folder files
|
||||||
|
- TRASH retention now configuration-based instead of parameter-based
|
||||||
|
- Enhanced flexibility for archival orchestration and monitoring
|
||||||
|
|
||||||
|
### v3.2.1 (2026-02-10)
|
||||||
|
- Fixed critical bug: Status update ARCHIVED → ARCHIVED_AND_TRASHED when moving files to TRASH folder
|
||||||
|
- Ensures proper status tracking for files retained in TRASH
|
||||||
|
|
||||||
|
### v3.2.0 (2026-02-06)
|
||||||
|
- Added `pKeepInTrash` parameter (DEFAULT TRUE) to ARCHIVE_TABLE_DATA
|
||||||
|
- TRASH folder retention control for safety and compliance
|
||||||
|
- Files kept in TRASH subfolder by default for rollback capability
|
||||||
|
|
||||||
|
### v3.1.0 (2026-02-05)
|
||||||
- **BREAKING CHANGE**: Removed CURRENT_MONTH_ONLY strategy (replaced by MINIMUM_AGE_MONTHS = 0)
|
- **BREAKING CHANGE**: Removed CURRENT_MONTH_ONLY strategy (replaced by MINIMUM_AGE_MONTHS = 0)
|
||||||
- Mathematical equivalence: CURRENT_MONTH_ONLY ≡ MINIMUM_AGE_MONTHS = 0
|
- Mathematical equivalence: CURRENT_MONTH_ONLY ≡ MINIMUM_AGE_MONTHS = 0
|
||||||
- Updated trigger validation to allow MINIMUM_AGE_MONTHS >= 0 (previously >= 1)
|
- Updated trigger validation to allow MINIMUM_AGE_MONTHS >= 0 (previously >= 1)
|
||||||
@@ -567,9 +1274,7 @@ WHERE object_name = 'FILE_ARCHIVER';
|
|||||||
- Example: CSDB securities data (MINIMUM_AGE_MONTHS = 6)
|
- Example: CSDB securities data (MINIMUM_AGE_MONTHS = 6)
|
||||||
|
|
||||||
2. **Use THRESHOLD_BASED when**:
|
2. **Use THRESHOLD_BASED when**:
|
||||||
- Maintaining backward compatibility with legacy behavior
|
|
||||||
- Simple time-based archival is sufficient
|
- Simple time-based archival is sufficient
|
||||||
- Migration from FILE_ARCHIVER v2.0.0
|
|
||||||
|
|
||||||
3. **Use HYBRID when**:
|
3. **Use HYBRID when**:
|
||||||
- Complex retention requirements
|
- Complex retention requirements
|
||||||
@@ -632,18 +1337,30 @@ WHERE object_name = 'FILE_ARCHIVER';
|
|||||||
|
|
||||||
### TRASH Folder Retention Best Practices
|
### TRASH Folder Retention Best Practices
|
||||||
|
|
||||||
1. **Default Behavior (pKeepInTrash = TRUE - Recommended)**:
|
1. **Default Behavior (KEEP_IN_TRASH = 'Y' - Recommended)**:
|
||||||
- Keeps CSV files in TRASH folder after archival
|
- Keeps CSV files in TRASH folder after archival
|
||||||
- Provides safety net for rollback if archival issues occur
|
- Provides safety net for rollback if archival issues occur
|
||||||
- Supports compliance and audit requirements
|
- Supports compliance and audit requirements
|
||||||
- Status: ARCHIVED_AND_TRASHED
|
- Status: ARCHIVED_AND_TRASHED
|
||||||
- Use for: Production environments, regulatory compliance, critical data
|
- Use for: Production environments, regulatory compliance, critical data
|
||||||
|
- Configuration:
|
||||||
|
```sql
|
||||||
|
UPDATE CT_MRDS.A_SOURCE_FILE_CONFIG
|
||||||
|
SET KEEP_IN_TRASH = 'Y'
|
||||||
|
WHERE SOURCE_FILE_TYPE = 'INPUT' AND TABLE_ID = 'YOUR_TABLE';
|
||||||
|
```
|
||||||
|
|
||||||
2. **TRASH Cleanup (pKeepInTrash = FALSE)**:
|
2. **TRASH Cleanup (KEEP_IN_TRASH = 'N')**:
|
||||||
- Deletes CSV files from TRASH folder after successful archival
|
- Deletes CSV files from TRASH folder after successful archival
|
||||||
- Reduces storage costs in DATA bucket
|
- Reduces storage costs in DATA bucket
|
||||||
- Status: ARCHIVED_AND_PURGED
|
- Status: ARCHIVED_AND_PURGED
|
||||||
- Use for: Non-critical data, storage optimization, test environments
|
- Use for: Non-critical data, storage optimization, test environments
|
||||||
|
- Configuration:
|
||||||
|
```sql
|
||||||
|
UPDATE CT_MRDS.A_SOURCE_FILE_CONFIG
|
||||||
|
SET KEEP_IN_TRASH = 'N'
|
||||||
|
WHERE SOURCE_FILE_TYPE = 'INPUT' AND TABLE_ID = 'YOUR_TABLE';
|
||||||
|
```
|
||||||
|
|
||||||
3. **Monitoring TRASH Folder**:
|
3. **Monitoring TRASH Folder**:
|
||||||
```sql
|
```sql
|
||||||
@@ -676,7 +1393,7 @@ WHERE object_name = 'FILE_ARCHIVER';
|
|||||||
## Author
|
## Author
|
||||||
|
|
||||||
Created by: Grzegorz Michalski
|
Created by: Grzegorz Michalski
|
||||||
Date: 2026-02-06
|
Date: 2026-02-11
|
||||||
Schema: CT_MRDS
|
Schema: CT_MRDS
|
||||||
Package: FILE_ARCHIVER
|
Package: FILE_ARCHIVER
|
||||||
Version: 3.2.0
|
Version: 3.3.0
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
# System Migration: Informatica + WLA → Airflow + DBT
|
# System Migration: Informatica + WLA → Airflow + DBT
|
||||||
|
|
||||||
This document describes the migration from the legacy Informatica + WLA data processing system to the modern Airflow + DBT architecture, including control table differences, data export strategies, and known limitations.
|
This document describes the migration from the legacy Informatica + WLA data processing system to the new Airflow + DBT architecture, including control table differences, data export strategies, and known limitations.
|
||||||
|
|
||||||
## Migration Overview
|
## Migration Overview
|
||||||
|
|
||||||
@@ -13,7 +13,7 @@ The MRDS (Market Reference Data System) is undergoing a fundamental technology m
|
|||||||
- Primary Control Table: `CT_ODS.A_LOAD_HISTORY`
|
- Primary Control Table: `CT_ODS.A_LOAD_HISTORY`
|
||||||
- Key Column: `A_ETL_LOAD_SET_KEY`
|
- Key Column: `A_ETL_LOAD_SET_KEY`
|
||||||
|
|
||||||
**Modern System (Airflow + DBT):**
|
**New System (Airflow + DBT):**
|
||||||
- Orchestration: Apache Airflow
|
- Orchestration: Apache Airflow
|
||||||
- Transformation: DBT (Data Build Tool)
|
- Transformation: DBT (Data Build Tool)
|
||||||
- Control Schema: `CT_MRDS` (MRDS Control)
|
- Control Schema: `CT_MRDS` (MRDS Control)
|
||||||
@@ -49,7 +49,7 @@ DQ_FLAG VARCHAR2(5) -- Data quality flag
|
|||||||
- Used for temporal partitioning in DATA_EXPORTER
|
- Used for temporal partitioning in DATA_EXPORTER
|
||||||
- Referenced via `A_ETL_LOAD_SET_KEY_FK` foreign key in data tables
|
- Referenced via `A_ETL_LOAD_SET_KEY_FK` foreign key in data tables
|
||||||
|
|
||||||
### Modern System: CT_MRDS Control Tables
|
### New System: CT_MRDS Control Tables
|
||||||
|
|
||||||
#### 1. A_SOURCE_FILE_RECEIVED
|
#### 1. A_SOURCE_FILE_RECEIVED
|
||||||
|
|
||||||
@@ -126,7 +126,7 @@ END;
|
|||||||
|
|
||||||
**Result**: CSV files in ODS bucket (DATA area), partitioned by LOAD_START from A_LOAD_HISTORY
|
**Result**: CSV files in ODS bucket (DATA area), partitioned by LOAD_START from A_LOAD_HISTORY
|
||||||
|
|
||||||
### Scenario 2: Modern System Data (Airflow + DBT → ODS → ARCHIVE)
|
### Scenario 2: New System Data (Airflow + DBT → ODS → ARCHIVE)
|
||||||
|
|
||||||
**Use Case**: Ongoing processing with new Airflow + DBT system
|
**Use Case**: Ongoing processing with new Airflow + DBT system
|
||||||
|
|
||||||
@@ -150,104 +150,91 @@ END;
|
|||||||
/
|
/
|
||||||
```
|
```
|
||||||
|
|
||||||
## Critical Gap: Legacy Data Archival
|
## Legacy Data Archival
|
||||||
|
|
||||||
### Problem Statement
|
### FILE_ARCHIVER Requirement
|
||||||
|
|
||||||
**Scenario**: Historical data exported using DATA_EXPORTER from Informatica-loaded tables
|
⚠️ **IMPORTANT**: FILE_ARCHIVER requires records in `A_SOURCE_FILE_RECEIVED` table to track and manage archival lifecycle.
|
||||||
|
|
||||||
**Issue**: FILE_ARCHIVER requires records in `A_SOURCE_FILE_RECEIVED`, but legacy exports don't create them
|
**For new system data (Airflow + DBT)**:
|
||||||
|
- Records automatically created by `FILE_MANAGER.PROCESS_SOURCE_FILE`
|
||||||
|
- No additional steps needed
|
||||||
|
|
||||||
**Impact**: Legacy data exported to ODS/DATA bucket **CANNOT** be archived to ARCHIVE bucket using FILE_ARCHIVER
|
**For legacy data (Informatica + WLA)**:
|
||||||
|
- Historical data requires registration in `A_SOURCE_FILE_RECEIVED`
|
||||||
|
- ✅ **SOLUTION**: Use DATA_EXPORTER v2.9.0+ with `pRegisterExport => TRUE` parameter
|
||||||
|
- Automatically registers exported files with proper metadata (size, checksum, location)
|
||||||
|
|
||||||
### Technical Analysis
|
### Export Strategies for Legacy Data
|
||||||
|
|
||||||
**DATA_EXPORTER Behavior**:
|
#### Strategy 1: Automatic Registration (Recommended)
|
||||||
```sql
|
|
||||||
-- Uses A_LOAD_HISTORY for partitioning (Informatica workflows)
|
|
||||||
SELECT DISTINCT TO_CHAR(L.LOAD_START,'YYYY') AS YR,
|
|
||||||
TO_CHAR(L.LOAD_START,'MM') AS MN
|
|
||||||
FROM OU_TOP.AGGREGATED_ALLOTMENT T, CT_ODS.A_LOAD_HISTORY L
|
|
||||||
WHERE T.A_ETL_LOAD_SET_KEY_FK = L.A_ETL_LOAD_SET_KEY
|
|
||||||
AND L.LOAD_START >= :pMinDate
|
|
||||||
AND L.LOAD_START < :pMaxDate;
|
|
||||||
|
|
||||||
-- Creates CSV files: ODS/legacy_migration/AGGREGATED_ALLOTMENT_YYYYMM.csv
|
✅ **DATA_EXPORTER v2.9.0+** supports automatic file registration via `pRegisterExport` parameter.
|
||||||
-- Does NOT create A_SOURCE_FILE_RECEIVED records
|
|
||||||
```
|
|
||||||
|
|
||||||
**FILE_ARCHIVER Requirement**:
|
**Benefits**:
|
||||||
```sql
|
- Simple, one-step export with automatic registration
|
||||||
-- Joins A_SOURCE_FILE_RECEIVED with A_WORKFLOW_HISTORY
|
- Files tracked in `A_SOURCE_FILE_RECEIVED` (enables FILE_ARCHIVER processing)
|
||||||
JOIN CT_MRDS.A_SOURCE_FILE_RECEIVED r
|
- Proper metadata capture (file size, checksum, location, timestamps)
|
||||||
ON r.A_SOURCE_FILE_CONFIG_KEY = pSourceFileConfig.A_SOURCE_FILE_CONFIG_KEY
|
- Standard workflow integration (archival strategies, status tracking)
|
||||||
AND r.PROCESSING_STATUS = 'INGESTED';
|
|
||||||
|
|
||||||
-- Without A_SOURCE_FILE_RECEIVED records, archival CANNOT proceed
|
**Example - CSV Export with Registration**:
|
||||||
```
|
|
||||||
|
|
||||||
### Workaround Strategies
|
|
||||||
|
|
||||||
#### Strategy 1: Manual Registration (Recommended for Small Datasets)
|
|
||||||
|
|
||||||
Manually create `A_SOURCE_FILE_RECEIVED` records for legacy exported files:
|
|
||||||
|
|
||||||
```sql
|
```sql
|
||||||
-- Step 1: Export legacy data to ODS/DATA
|
-- Export with automatic registration (DATA_EXPORTER v2.9.0+)
|
||||||
BEGIN
|
BEGIN
|
||||||
DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
|
CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
|
||||||
pSchemaName => 'OU_TOP',
|
pSchemaName => 'OU_TOP',
|
||||||
pTableName => 'AGGREGATED_ALLOTMENT',
|
pTableName => 'AGGREGATED_ALLOTMENT',
|
||||||
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
||||||
pBucketArea => 'DATA',
|
pBucketArea => 'DATA',
|
||||||
pFolderName => 'legacy_export',
|
pFolderName => 'legacy_export',
|
||||||
pMinDate => DATE '2024-01-01',
|
pMinDate => DATE '2024-01-01',
|
||||||
pMaxDate => DATE '2024-12-31'
|
pMaxDate => DATE '2024-12-31',
|
||||||
|
pRegisterExport => TRUE, -- ✓ Automatically registers files
|
||||||
|
pProcessName => 'LEGACY_MIGRATION'
|
||||||
);
|
);
|
||||||
END;
|
END;
|
||||||
/
|
/
|
||||||
|
|
||||||
-- Step 2: List exported CSV files
|
-- Files now registered in A_SOURCE_FILE_RECEIVED with:
|
||||||
SELECT object_name, time_created, bytes
|
-- - SOURCE_FILE_NAME: Full OCI path
|
||||||
FROM TABLE(MRDS_LOADER.cloud_wrapper.list_objects(
|
-- - PROCESSING_STATUS: 'INGESTED'
|
||||||
credential_name => 'DEF_CRED_ARN',
|
-- - BYTES: Actual file size
|
||||||
location_uri => 'https://objectstorage.eu-frankfurt-1.oraclecloud.com/n/frtgjxu7zl7c/b/data/o/'
|
-- - CHECKSUM: File ETag from OCI
|
||||||
)) WHERE object_name LIKE 'ODS/legacy_export/AGGREGATED_ALLOTMENT_%';
|
-- - PROCESS_NAME: 'LEGACY_MIGRATION'
|
||||||
|
|
||||||
-- Step 3: Manually register each file in A_SOURCE_FILE_RECEIVED
|
-- Now FILE_ARCHIVER can process these files
|
||||||
-- (Requires source configuration for AGGREGATED_ALLOTMENT to exist)
|
|
||||||
INSERT INTO CT_MRDS.A_SOURCE_FILE_RECEIVED (
|
|
||||||
A_SOURCE_FILE_RECEIVED_KEY,
|
|
||||||
A_SOURCE_FILE_CONFIG_KEY,
|
|
||||||
SOURCE_FILE_NAME,
|
|
||||||
PROCESSING_STATUS,
|
|
||||||
RECEPTION_DATE,
|
|
||||||
BYTES,
|
|
||||||
CHECKSUM,
|
|
||||||
EXTERNAL_TABLE_NAME
|
|
||||||
) VALUES (
|
|
||||||
A_SOURCE_FILE_RECEIVED_KEY_SEQ.NEXTVAL,
|
|
||||||
(SELECT A_SOURCE_FILE_CONFIG_KEY FROM A_SOURCE_FILE_CONFIG
|
|
||||||
WHERE SOURCE_FILE_ID = 'AGGREGATED_ALLOTMENT' AND SOURCE_FILE_TYPE = 'INPUT'),
|
|
||||||
'ODS/legacy_export/AGGREGATED_ALLOTMENT_202401.csv',
|
|
||||||
'INGESTED', -- Skip validation, mark as already ingested
|
|
||||||
DATE '2024-01-15',
|
|
||||||
1048576, -- File size in bytes
|
|
||||||
'manual_registration',
|
|
||||||
NULL -- No external table needed
|
|
||||||
);
|
|
||||||
-- Repeat for all exported CSV files
|
|
||||||
COMMIT;
|
|
||||||
|
|
||||||
-- Step 4: Now FILE_ARCHIVER can process these files
|
|
||||||
BEGIN
|
BEGIN
|
||||||
FILE_ARCHIVER.ARCHIVE_TABLE_DATA(pSourceFileConfig => vConfig);
|
CT_MRDS.FILE_ARCHIVER.ARCHIVE_TABLE_DATA(
|
||||||
|
pSourceFileConfigKey => vConfigKey
|
||||||
|
);
|
||||||
|
END;
|
||||||
|
/
|
||||||
|
```
|
||||||
|
|
||||||
|
**Example - Single CSV Export with Registration**:
|
||||||
|
```sql
|
||||||
|
-- For single file export (not partitioned by date)
|
||||||
|
BEGIN
|
||||||
|
CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA(
|
||||||
|
pSchemaName => 'CT_MRDS',
|
||||||
|
pTableName => 'MY_TABLE',
|
||||||
|
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
||||||
|
pBucketArea => 'DATA',
|
||||||
|
pFolderName => 'legacy_export',
|
||||||
|
pFileName => 'my_table_export.csv',
|
||||||
|
pTemplateTableName => 'CT_ET_TEMPLATES.MY_TEMPLATE',
|
||||||
|
pRegisterExport => TRUE, -- ✓ Registers file
|
||||||
|
pProcessName => 'LEGACY_MIGRATION'
|
||||||
|
);
|
||||||
END;
|
END;
|
||||||
/
|
/
|
||||||
```
|
```
|
||||||
|
|
||||||
#### Strategy 2: Direct Archive Export (Bypass ODS)
|
#### Strategy 2: Direct Archive Export (Bypass ODS)
|
||||||
|
|
||||||
|
⚠️ **Use when**: You want to skip the ODS bucket entirely and go straight to ARCHIVE
|
||||||
|
|
||||||
Skip ODS/DATA bucket entirely - export directly to ARCHIVE bucket in Parquet format:
|
Skip ODS/DATA bucket entirely - export directly to ARCHIVE bucket in Parquet format:
|
||||||
|
|
||||||
```sql
|
```sql
|
||||||
@@ -411,18 +398,18 @@ CALL FILE_MANAGER.ADD_SOURCE_FILE_CONFIG(
|
|||||||
|
|
||||||
## Known Limitations
|
## Known Limitations
|
||||||
|
|
||||||
### 1. No Retroactive A_SOURCE_FILE_RECEIVED Creation
|
### 1. FILE_ARCHIVER Requires A_SOURCE_FILE_RECEIVED
|
||||||
DATA_EXPORTER does not automatically create A_SOURCE_FILE_RECEIVED records when exporting legacy data. This is by design - it's a one-time export tool, not a file tracking system.
|
FILE_ARCHIVER cannot archive data without corresponding A_SOURCE_FILE_RECEIVED records.
|
||||||
|
|
||||||
### 2. FILE_ARCHIVER Requires A_SOURCE_FILE_RECEIVED
|
**Solutions**:
|
||||||
FILE_ARCHIVER cannot archive data without corresponding A_SOURCE_FILE_RECEIVED records. This prevents archiving of:
|
- ✅ **New system data**: Automatically registered via `FILE_MANAGER.PROCESS_SOURCE_FILE`
|
||||||
- Legacy Informatica-loaded data exported via DATA_EXPORTER
|
- ✅ **Legacy data exports**: Use `DATA_EXPORTER` with `pRegisterExport => TRUE` (v2.9.0+)
|
||||||
- Manually uploaded files not processed through FILE_MANAGER.PROCESS_SOURCE_FILE
|
- ⚠️ **Manual uploads**: Must be registered via `FILE_MANAGER.PROCESS_SOURCE_FILE` or manual INSERT
|
||||||
|
|
||||||
### 3. Mixed Control Table References
|
### 2. Mixed Control Table References
|
||||||
During migration period, some procedures reference A_LOAD_HISTORY (DATA_EXPORTER) while others reference A_WORKFLOW_HISTORY (FILE_ARCHIVER). This is intentional but requires careful understanding of data lineage.
|
During migration period, some procedures reference A_LOAD_HISTORY (DATA_EXPORTER) while others reference A_WORKFLOW_HISTORY (FILE_ARCHIVER). This is intentional but requires careful understanding of data lineage.
|
||||||
|
|
||||||
### 4. A_WORKFLOW_HISTORY vs A_LOAD_HISTORY Column Mismatch
|
### 3. A_WORKFLOW_HISTORY vs A_LOAD_HISTORY Column Mismatch
|
||||||
The control tables have different schemas:
|
The control tables have different schemas:
|
||||||
- **A_LOAD_HISTORY**: `LOAD_START`, `A_ETL_LOAD_SET_KEY`
|
- **A_LOAD_HISTORY**: `LOAD_START`, `A_ETL_LOAD_SET_KEY`
|
||||||
- **A_WORKFLOW_HISTORY**: `WORKFLOW_START`, `A_WORKFLOW_HISTORY_KEY`
|
- **A_WORKFLOW_HISTORY**: `WORKFLOW_START`, `A_WORKFLOW_HISTORY_KEY`
|
||||||
@@ -445,4 +432,8 @@ The migration from Informatica + WLA to Airflow + DBT introduces new control tab
|
|||||||
- **Archival Operations**: Ensuring FILE_ARCHIVER has required metadata
|
- **Archival Operations**: Ensuring FILE_ARCHIVER has required metadata
|
||||||
- **Testing**: Using correct control tables in test scenarios
|
- **Testing**: Using correct control tables in test scenarios
|
||||||
|
|
||||||
The recommended approach for legacy data migration is **Strategy 2 (Direct to ARCHIVE)** for large datasets, as it avoids the complexity of manual A_SOURCE_FILE_RECEIVED registration while achieving the goal of moving historical data to long-term archival storage.
|
**Recommended Approach for Legacy Data Migration**:
|
||||||
|
|
||||||
|
1. ✅ **Strategy 1 (Automatic Registration)** - Use `DATA_EXPORTER` with `pRegisterExport => TRUE` to automatically register files in `A_SOURCE_FILE_RECEIVED`, enabling full FILE_ARCHIVER workflow (archival strategies, status tracking, rollback capabilities)
|
||||||
|
|
||||||
|
2. ⚠️ **Strategy 2 (Direct to ARCHIVE)** - Export directly to ARCHIVE bucket to bypass ODS bucket entirely and avoid registration requirements (use when tracking is not needed)
|
||||||
|
|||||||
Reference in New Issue
Block a user