Merge branch 'main' of https://git.itbi.mywire.org/admin/mars
This commit is contained in:
7
.vscode/settings.json
vendored
7
.vscode/settings.json
vendored
@@ -121,6 +121,13 @@
|
||||
"password": "Cloudpass#34",
|
||||
"connectionString": "ggmichalski_high",
|
||||
"walletLocation": "c:\\_git\\OracleAI\\oracledb1\\Wallet_ggmichalski"
|
||||
},
|
||||
{
|
||||
"name": "OU_C2D@ggmichalski_high",
|
||||
"username": "OU_C2D",
|
||||
"password": "Cloudpass#34",
|
||||
"connectionString": "ggmichalski_high",
|
||||
"walletLocation": "c:\\_git\\OracleAI\\oracledb1\\Wallet_ggmichalski"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
@@ -24,7 +24,8 @@ BEGIN
|
||||
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
||||
pBucketArea => 'ARCHIVE',
|
||||
pFolderName => 'ARCHIVE/LM/LM_ADHOC_ADJUSTMENTS_HEADER',
|
||||
pParallelDegree => 1
|
||||
pParallelDegree => 1,
|
||||
pJobClass => 'high'
|
||||
);
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_ADHOC_ADJ_HEADER exported');
|
||||
EXCEPTION
|
||||
@@ -44,7 +45,8 @@ BEGIN
|
||||
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
||||
pBucketArea => 'ARCHIVE',
|
||||
pFolderName => 'ARCHIVE/LM/LM_ADHOC_ADJUSTMENTS_ITEM',
|
||||
pParallelDegree => 1
|
||||
pParallelDegree => 1,
|
||||
pJobClass => 'high'
|
||||
);
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_ADHOC_ADJ_ITEM exported');
|
||||
EXCEPTION
|
||||
@@ -64,7 +66,8 @@ BEGIN
|
||||
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
||||
pBucketArea => 'ARCHIVE',
|
||||
pFolderName => 'ARCHIVE/LM/LM_ADHOC_ADJUSTMENTS_ITEM_HEADER',
|
||||
pParallelDegree => 1
|
||||
pParallelDegree => 1,
|
||||
pJobClass => 'high'
|
||||
);
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_ADHOC_ADJ_ITEM_HEADER exported');
|
||||
EXCEPTION
|
||||
|
||||
@@ -29,7 +29,8 @@ BEGIN
|
||||
pKeyColumnName => 'A_ETL_LOAD_SET_KEY',
|
||||
pBucketArea => 'ARCHIVE',
|
||||
pFolderName => 'ARCHIVE/LM/LM_BALANCESHEET_HEADER',
|
||||
pParallelDegree => 4
|
||||
pParallelDegree => 4,
|
||||
pJobClass => 'high'
|
||||
);
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_BALANCESHEET_HEADER exported');
|
||||
EXCEPTION
|
||||
@@ -49,7 +50,8 @@ BEGIN
|
||||
pKeyColumnName => 'A_ETL_LOAD_SET_KEY',
|
||||
pBucketArea => 'ARCHIVE',
|
||||
pFolderName => 'ARCHIVE/LM/LM_BALANCESHEET_ITEM',
|
||||
pParallelDegree => 16
|
||||
pParallelDegree => 16,
|
||||
pJobClass => 'high'
|
||||
);
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_BALANCESHEET_ITEM exported');
|
||||
EXCEPTION
|
||||
|
||||
@@ -24,7 +24,8 @@ BEGIN
|
||||
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
||||
pBucketArea => 'ARCHIVE',
|
||||
pFolderName => 'ARCHIVE/LM/LM_CSM_ADJUSTMENTS_HEADER',
|
||||
pParallelDegree => 1
|
||||
pParallelDegree => 1,
|
||||
pJobClass => 'high'
|
||||
);
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_CSM_ADJ_HEADER exported');
|
||||
EXCEPTION
|
||||
@@ -44,7 +45,8 @@ BEGIN
|
||||
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
||||
pBucketArea => 'ARCHIVE',
|
||||
pFolderName => 'ARCHIVE/LM/LM_CSM_ADJUSTMENTS_ITEM',
|
||||
pParallelDegree => 2
|
||||
pParallelDegree => 2,
|
||||
pJobClass => 'high'
|
||||
);
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_CSM_ADJ_ITEM exported');
|
||||
EXCEPTION
|
||||
@@ -64,7 +66,8 @@ BEGIN
|
||||
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
||||
pBucketArea => 'ARCHIVE',
|
||||
pFolderName => 'ARCHIVE/LM/LM_CSM_ADJUSTMENTS_ITEM_HEADER',
|
||||
pParallelDegree => 2
|
||||
pParallelDegree => 2,
|
||||
pJobClass => 'high'
|
||||
);
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_CSM_ADJ_ITEM_HEADER exported');
|
||||
EXCEPTION
|
||||
|
||||
@@ -29,7 +29,8 @@ BEGIN
|
||||
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
|
||||
pBucketArea => 'ARCHIVE',
|
||||
pFolderName => 'ARCHIVE/LM/LM_STANDING_FACILITIES',
|
||||
pParallelDegree => 8
|
||||
pParallelDegree => 8,
|
||||
pJobClass => 'high'
|
||||
);
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_STANDING_FACILITY exported');
|
||||
EXCEPTION
|
||||
@@ -49,7 +50,8 @@ BEGIN
|
||||
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
|
||||
pBucketArea => 'ARCHIVE',
|
||||
pFolderName => 'ARCHIVE/LM/LM_STANDING_FACILITIES_HEADER',
|
||||
pParallelDegree => 2
|
||||
pParallelDegree => 2,
|
||||
pJobClass => 'high'
|
||||
);
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_STANDING_FACILITY_HEADER exported');
|
||||
EXCEPTION
|
||||
|
||||
@@ -25,7 +25,8 @@ BEGIN
|
||||
pKeyColumnName => 'A_ETL_LOAD_SET_KEY',
|
||||
pBucketArea => 'ARCHIVE',
|
||||
pFolderName => 'ARCHIVE/LM/LM_CURRENT_ACCOUNTS_HEADER',
|
||||
pParallelDegree => 2
|
||||
pParallelDegree => 2,
|
||||
pJobClass => 'high'
|
||||
);
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_MRR_IND_CURRENT_ACCOUNT_HEADER exported');
|
||||
EXCEPTION
|
||||
@@ -45,7 +46,8 @@ BEGIN
|
||||
pKeyColumnName => 'A_ETL_LOAD_SET_KEY',
|
||||
pBucketArea => 'ARCHIVE',
|
||||
pFolderName => 'ARCHIVE/LM/LM_CURRENT_ACCOUNTS_ITEM',
|
||||
pParallelDegree => 16
|
||||
pParallelDegree => 16,
|
||||
pJobClass => 'high'
|
||||
);
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_MRR_IND_CURRENT_ACCOUNT_ITEM exported');
|
||||
EXCEPTION
|
||||
|
||||
@@ -29,7 +29,8 @@ BEGIN
|
||||
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
|
||||
pBucketArea => 'ARCHIVE',
|
||||
pFolderName => 'ARCHIVE/LM/LM_FORECAST_HEADER',
|
||||
pParallelDegree => 4
|
||||
pParallelDegree => 4,
|
||||
pJobClass => 'high'
|
||||
);
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_FORECAST_HEADER exported');
|
||||
EXCEPTION
|
||||
@@ -49,7 +50,8 @@ BEGIN
|
||||
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
|
||||
pBucketArea => 'ARCHIVE',
|
||||
pFolderName => 'ARCHIVE/LM/LM_FORECAST_ITEM',
|
||||
pParallelDegree => 16
|
||||
pParallelDegree => 16,
|
||||
pJobClass => 'high'
|
||||
);
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_FORECAST_ITEM exported');
|
||||
EXCEPTION
|
||||
|
||||
@@ -24,7 +24,8 @@ BEGIN
|
||||
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
||||
pBucketArea => 'ARCHIVE',
|
||||
pFolderName => 'ARCHIVE/LM/LM_QRE_ADJUSTMENTS_HEADER',
|
||||
pParallelDegree => 1
|
||||
pParallelDegree => 1,
|
||||
pJobClass => 'high'
|
||||
);
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_QR_ADJ_HEADER exported');
|
||||
EXCEPTION
|
||||
@@ -44,7 +45,8 @@ BEGIN
|
||||
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
||||
pBucketArea => 'ARCHIVE',
|
||||
pFolderName => 'ARCHIVE/LM/LM_QRE_ADJUSTMENTS_ITEM',
|
||||
pParallelDegree => 4
|
||||
pParallelDegree => 4,
|
||||
pJobClass => 'high'
|
||||
);
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_QR_ADJ_ITEM exported');
|
||||
EXCEPTION
|
||||
@@ -64,7 +66,8 @@ BEGIN
|
||||
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
||||
pBucketArea => 'ARCHIVE',
|
||||
pFolderName => 'ARCHIVE/LM/LM_QRE_ADJUSTMENTS_ITEM_HEADER',
|
||||
pParallelDegree => 2
|
||||
pParallelDegree => 2,
|
||||
pJobClass => 'high'
|
||||
);
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_QR_ADJ_ITEM_HEADER exported');
|
||||
EXCEPTION
|
||||
|
||||
@@ -24,7 +24,8 @@ BEGIN
|
||||
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
|
||||
pBucketArea => 'ARCHIVE',
|
||||
pFolderName => 'ARCHIVE/LM/LM_TTS_HEADER',
|
||||
pParallelDegree => 1
|
||||
pParallelDegree => 1,
|
||||
pJobClass => 'high'
|
||||
);
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_TTS_HEADER exported');
|
||||
EXCEPTION
|
||||
@@ -44,7 +45,8 @@ BEGIN
|
||||
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
|
||||
pBucketArea => 'ARCHIVE',
|
||||
pFolderName => 'ARCHIVE/LM/LM_TTS_ITEM',
|
||||
pParallelDegree => 1
|
||||
pParallelDegree => 1,
|
||||
pJobClass => 'high'
|
||||
);
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_TTS_ITEM exported');
|
||||
EXCEPTION
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
-- ============================================================================
|
||||
-- MARS-835-PREHOOK Installation Script 02: DATA_EXPORTER Package
|
||||
-- ============================================================================
|
||||
-- Purpose: Deploy updated DATA_EXPORTER package (SPEC + BODY) with parallel processing
|
||||
-- Purpose: Deploy updated DATA_EXPORTER package (SPEC + BODY) v2.8.1
|
||||
-- Schema: CT_MRDS
|
||||
-- Object: PACKAGE DATA_EXPORTER
|
||||
|
||||
-- ============================================================================
|
||||
|
||||
SET SERVEROUTPUT ON SIZE UNLIMITED
|
||||
@@ -13,8 +14,8 @@ PROMPT =========================================================================
|
||||
PROMPT MARS-835-PREHOOK: Installing CT_MRDS.DATA_EXPORTER Package
|
||||
PROMPT ============================================================================
|
||||
PROMPT Package: CT_MRDS.DATA_EXPORTER
|
||||
PROMPT Version: 2.2.0 -> 2.4.0 (MINOR)
|
||||
PROMPT Change: Added parallel processing + Smart Column Mapping for CSV exports
|
||||
PROMPT Version: 2.2.0 -> 2.8.1 (PATCH)
|
||||
PROMPT Change: Fixed query in EXPORT_TABLE_DATA - removed A_LOAD_HISTORY join for single file
|
||||
PROMPT ============================================================================
|
||||
|
||||
PROMPT
|
||||
|
||||
@@ -0,0 +1,70 @@
|
||||
-- ====================================================================
|
||||
-- MARS-835-PREHOOK: Update A_SOURCE_FILE_RECEIVED Table Structure
|
||||
-- ====================================================================
|
||||
-- Purpose:
|
||||
-- 1. Rename column ARCH_FILE_NAME to ARCH_PATH
|
||||
-- 2. Add new column PROCESS_NAME VARCHAR2(200)
|
||||
-- Author: Grzegorz Michalski
|
||||
-- Date: 2026-02-13
|
||||
-- ====================================================================
|
||||
|
||||
SET SERVEROUTPUT ON SIZE UNLIMITED
|
||||
SET TIMING ON
|
||||
|
||||
PROMPT ====================================================================
|
||||
PROMPT MARS-835-PREHOOK: Updating A_SOURCE_FILE_RECEIVED table structure
|
||||
PROMPT ====================================================================
|
||||
|
||||
-- Check if column ARCH_FILE_NAME exists
|
||||
DECLARE
|
||||
v_column_exists NUMBER;
|
||||
v_process_name_exists NUMBER;
|
||||
BEGIN
|
||||
-- Check if ARCH_FILE_NAME exists
|
||||
SELECT COUNT(*)
|
||||
INTO v_column_exists
|
||||
FROM dba_tab_columns
|
||||
WHERE owner = 'CT_MRDS'
|
||||
AND table_name = 'A_SOURCE_FILE_RECEIVED'
|
||||
AND column_name = 'ARCH_FILE_NAME';
|
||||
|
||||
IF v_column_exists > 0 THEN
|
||||
DBMS_OUTPUT.PUT_LINE('INFO: Renaming column ARCH_FILE_NAME to ARCH_PATH...');
|
||||
EXECUTE IMMEDIATE 'ALTER TABLE CT_MRDS.A_SOURCE_FILE_RECEIVED RENAME COLUMN ARCH_FILE_NAME TO ARCH_PATH';
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: Column renamed to ARCH_PATH');
|
||||
ELSE
|
||||
DBMS_OUTPUT.PUT_LINE('INFO: Column ARCH_FILE_NAME does not exist (already renamed or first install)');
|
||||
END IF;
|
||||
|
||||
-- Check if PROCESS_NAME already exists
|
||||
SELECT COUNT(*)
|
||||
INTO v_process_name_exists
|
||||
FROM dba_tab_columns
|
||||
WHERE owner = 'CT_MRDS'
|
||||
AND table_name = 'A_SOURCE_FILE_RECEIVED'
|
||||
AND column_name = 'PROCESS_NAME';
|
||||
|
||||
IF v_process_name_exists = 0 THEN
|
||||
DBMS_OUTPUT.PUT_LINE('INFO: Adding new column PROCESS_NAME...');
|
||||
EXECUTE IMMEDIATE 'ALTER TABLE CT_MRDS.A_SOURCE_FILE_RECEIVED ADD (PROCESS_NAME VARCHAR2(200))';
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: Column PROCESS_NAME added');
|
||||
|
||||
-- Add comment on new column
|
||||
EXECUTE IMMEDIATE 'COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.PROCESS_NAME IS ''Name of the process that created this record''';
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: Comment added to PROCESS_NAME column');
|
||||
ELSE
|
||||
DBMS_OUTPUT.PUT_LINE('INFO: Column PROCESS_NAME already exists');
|
||||
END IF;
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: A_SOURCE_FILE_RECEIVED table structure updated successfully');
|
||||
|
||||
EXCEPTION
|
||||
WHEN OTHERS THEN
|
||||
DBMS_OUTPUT.PUT_LINE('ERROR: Failed to update table structure: ' || SQLERRM);
|
||||
RAISE;
|
||||
END;
|
||||
/
|
||||
|
||||
PROMPT ====================================================================
|
||||
PROMPT A_SOURCE_FILE_RECEIVED Table Update Completed
|
||||
PROMPT ====================================================================
|
||||
@@ -0,0 +1,65 @@
|
||||
-- ====================================================================
|
||||
-- MARS-835-PREHOOK ROLLBACK: Revert A_SOURCE_FILE_RECEIVED Table Structure
|
||||
-- ====================================================================
|
||||
-- Purpose:
|
||||
-- 1. Rename column ARCH_PATH back to ARCH_FILE_NAME
|
||||
-- 2. Remove column PROCESS_NAME
|
||||
-- Author: Grzegorz Michalski
|
||||
-- Date: 2026-02-13
|
||||
-- ====================================================================
|
||||
|
||||
SET SERVEROUTPUT ON SIZE UNLIMITED
|
||||
SET TIMING ON
|
||||
|
||||
PROMPT ====================================================================
|
||||
PROMPT MARS-835-PREHOOK ROLLBACK: Reverting A_SOURCE_FILE_RECEIVED table
|
||||
PROMPT ====================================================================
|
||||
|
||||
DECLARE
|
||||
v_column_exists NUMBER;
|
||||
v_process_name_exists NUMBER;
|
||||
BEGIN
|
||||
-- Check if ARCH_PATH exists (needs to be renamed back)
|
||||
SELECT COUNT(*)
|
||||
INTO v_column_exists
|
||||
FROM dba_tab_columns
|
||||
WHERE owner = 'CT_MRDS'
|
||||
AND table_name = 'A_SOURCE_FILE_RECEIVED'
|
||||
AND column_name = 'ARCH_PATH';
|
||||
|
||||
IF v_column_exists > 0 THEN
|
||||
DBMS_OUTPUT.PUT_LINE('INFO: Renaming column ARCH_PATH back to ARCH_FILE_NAME...');
|
||||
EXECUTE IMMEDIATE 'ALTER TABLE CT_MRDS.A_SOURCE_FILE_RECEIVED RENAME COLUMN ARCH_PATH TO ARCH_FILE_NAME';
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: Column renamed back to ARCH_FILE_NAME');
|
||||
ELSE
|
||||
DBMS_OUTPUT.PUT_LINE('INFO: Column ARCH_PATH does not exist (already rolled back)');
|
||||
END IF;
|
||||
|
||||
-- Check if PROCESS_NAME exists (needs to be dropped)
|
||||
SELECT COUNT(*)
|
||||
INTO v_process_name_exists
|
||||
FROM dba_tab_columns
|
||||
WHERE owner = 'CT_MRDS'
|
||||
AND table_name = 'A_SOURCE_FILE_RECEIVED'
|
||||
AND column_name = 'PROCESS_NAME';
|
||||
|
||||
IF v_process_name_exists > 0 THEN
|
||||
DBMS_OUTPUT.PUT_LINE('INFO: Dropping column PROCESS_NAME...');
|
||||
EXECUTE IMMEDIATE 'ALTER TABLE CT_MRDS.A_SOURCE_FILE_RECEIVED DROP COLUMN PROCESS_NAME';
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: Column PROCESS_NAME dropped');
|
||||
ELSE
|
||||
DBMS_OUTPUT.PUT_LINE('INFO: Column PROCESS_NAME does not exist (already rolled back)');
|
||||
END IF;
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: A_SOURCE_FILE_RECEIVED table structure rollback completed');
|
||||
|
||||
EXCEPTION
|
||||
WHEN OTHERS THEN
|
||||
DBMS_OUTPUT.PUT_LINE('ERROR: Failed to rollback table structure: ' || SQLERRM);
|
||||
RAISE;
|
||||
END;
|
||||
/
|
||||
|
||||
PROMPT ====================================================================
|
||||
PROMPT A_SOURCE_FILE_RECEIVED Table Rollback Completed
|
||||
PROMPT ====================================================================
|
||||
@@ -31,6 +31,7 @@ PROMPT =========================================================================
|
||||
PROMPT
|
||||
PROMPT This script will:
|
||||
PROMPT - Create A_PARALLEL_EXPORT_CHUNKS table with unique timestamp task names
|
||||
PROMPT - Update A_SOURCE_FILE_RECEIVED table (rename ARCH_FILE_NAME to ARCH_PATH, add PROCESS_NAME column)
|
||||
PROMPT - Update ENV_MANAGER to v3.2.0 (add parallel execution error codes)
|
||||
PROMPT - Update DATA_EXPORTER to v2.4.0 (DBMS_PARALLEL_EXECUTE + Smart Column Mapping)
|
||||
PROMPT - Add pParallelDegree parameter (1-16 threads) to EXPORT_*_BY_DATE procedures
|
||||
@@ -59,25 +60,31 @@ PROMPT =========================================================================
|
||||
|
||||
PROMPT
|
||||
PROMPT =========================================================================
|
||||
PROMPT Step 2: Deploy ENV_MANAGER Package
|
||||
PROMPT Step 2: Update A_SOURCE_FILE_RECEIVED Table Structure
|
||||
PROMPT =========================================================================
|
||||
@@03_MARS_835_PREHOOK_update_SOURCE_FILE_RECEIVED_table.sql
|
||||
|
||||
PROMPT
|
||||
PROMPT =========================================================================
|
||||
PROMPT Step 3: Deploy ENV_MANAGER Package
|
||||
PROMPT =========================================================================
|
||||
@@01_MARS_835_PREHOOK_install_ENV_MANAGER.sql
|
||||
|
||||
PROMPT
|
||||
PROMPT =========================================================================
|
||||
PROMPT Step 3: Deploy DATA_EXPORTER Package
|
||||
PROMPT Step 4: Deploy DATA_EXPORTER Package
|
||||
PROMPT =========================================================================
|
||||
@@02_MARS_835_PREHOOK_install_DATA_EXPORTER.sql
|
||||
|
||||
PROMPT
|
||||
PROMPT =========================================================================
|
||||
PROMPT Step 4: Track Package Versions
|
||||
PROMPT Step 5: Track Package Versions
|
||||
PROMPT =========================================================================
|
||||
@@track_package_versions.sql
|
||||
|
||||
PROMPT
|
||||
PROMPT =========================================================================
|
||||
PROMPT Step 5: Verify Package Versions
|
||||
PROMPT Step 6: Verify Package Versions
|
||||
PROMPT =========================================================================
|
||||
@@verify_packages_version.sql
|
||||
|
||||
|
||||
@@ -43,6 +43,7 @@ CREATE TABLE CT_MRDS.A_PARALLEL_EXPORT_CHUNKS (
|
||||
FILE_BASE_NAME VARCHAR2(1000),
|
||||
TEMPLATE_TABLE_NAME VARCHAR2(200),
|
||||
MAX_FILE_SIZE NUMBER DEFAULT 104857600 NOT NULL,
|
||||
JOB_CLASS VARCHAR2(128),
|
||||
STATUS VARCHAR2(30) DEFAULT 'PENDING' NOT NULL,
|
||||
ERROR_MESSAGE VARCHAR2(4000),
|
||||
EXPORT_TIMESTAMP TIMESTAMP,
|
||||
@@ -69,6 +70,7 @@ COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.FORMAT_TYPE IS 'Export format
|
||||
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.FILE_BASE_NAME IS 'Base filename for CSV exports (NULL for Parquet)';
|
||||
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.TEMPLATE_TABLE_NAME IS 'Template table name for per-column date format configuration (e.g., CT_ET_TEMPLATES.TABLE_NAME)';
|
||||
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.MAX_FILE_SIZE IS 'Maximum file size in bytes for CSV exports only (e.g., 104857600 = 100MB, 1073741824 = 1GB) - default 100MB (104857600). NOTE: Not applicable for PARQUET format (Oracle limitation)';
|
||||
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.JOB_CLASS IS 'Oracle Scheduler job class name for resource management (e.g., ''high'', ''DEFAULT_JOB_CLASS'') - NULL uses default scheduler priority';
|
||||
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.STATUS IS 'Chunk processing status: PENDING (not started), PROCESSING (in progress), COMPLETED (success), FAILED (error) - allows retry of failed partitions only';
|
||||
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.ERROR_MESSAGE IS 'Error message if chunk processing failed (STATUS = FAILED)';
|
||||
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.EXPORT_TIMESTAMP IS 'Timestamp when chunk export was completed (STATUS = COMPLETED)';
|
||||
|
||||
@@ -0,0 +1,30 @@
|
||||
-- ====================================================================
|
||||
-- A_SOURCE_FILE_RECEIVED Table
|
||||
-- ====================================================================
|
||||
-- Purpose: Track received files and their processing status
|
||||
-- ====================================================================
|
||||
|
||||
CREATE TABLE CT_MRDS.A_SOURCE_FILE_RECEIVED (
|
||||
A_SOURCE_FILE_RECEIVED_KEY NUMBER(38,0) NOT NULL ENABLE,
|
||||
A_SOURCE_FILE_CONFIG_KEY NUMBER(38,0) NOT NULL ENABLE,
|
||||
SOURCE_FILE_NAME VARCHAR2(1000) NOT NULL,
|
||||
CHECKSUM VARCHAR2(128),
|
||||
CREATED TIMESTAMP(6) WITH TIME ZONE,
|
||||
BYTES NUMBER,
|
||||
RECEPTION_DATE DATE NOT NULL,
|
||||
PROCESSING_STATUS VARCHAR2(200),
|
||||
EXTERNAL_TABLE_NAME VARCHAR2(200),
|
||||
PARTITION_YEAR VARCHAR2(4),
|
||||
PARTITION_MONTH VARCHAR2(2),
|
||||
ARCH_PATH VARCHAR2(1000),
|
||||
PROCESS_NAME VARCHAR2(200),
|
||||
CONSTRAINT A_SOURCE_FILE_RECEIVED_PK PRIMARY KEY (A_SOURCE_FILE_RECEIVED_KEY),
|
||||
CONSTRAINT ASFR_A_SOURCE_FILE_CONFIG_KEY_FK FOREIGN KEY(A_SOURCE_FILE_CONFIG_KEY) REFERENCES CT_MRDS.A_SOURCE_FILE_CONFIG(A_SOURCE_FILE_CONFIG_KEY),
|
||||
CONSTRAINT A_SOURCE_FILE_RECEIVED_CHK CHECK (PROCESSING_STATUS IN ('RECEIVED', 'VALIDATED', 'READY_FOR_INGESTION', 'INGESTED', 'ARCHIVED'))
|
||||
) TABLESPACE "DATA";
|
||||
|
||||
-- Unique index for file identification (workaround for TIMESTAMP WITH TIMEZONE constraint limitation)
|
||||
CREATE UNIQUE INDEX CT_MRDS.A_SOURCE_FILE_RECEIVED_UK1
|
||||
ON CT_MRDS.A_SOURCE_FILE_RECEIVED(CHECKSUM, CREATED, BYTES);
|
||||
|
||||
GRANT SELECT, INSERT, UPDATE, DELETE ON CT_MRDS.A_SOURCE_FILE_RECEIVED TO MRDS_LOADER_ROLE;
|
||||
@@ -501,6 +501,7 @@ AS
|
||||
vFormat VARCHAR2(20);
|
||||
vFileBaseName VARCHAR2(1000);
|
||||
vMaxFileSize NUMBER;
|
||||
vJobClass VARCHAR2(128);
|
||||
vParameters VARCHAR2(4000);
|
||||
BEGIN
|
||||
-- Retrieve chunk context from global temporary table
|
||||
@@ -518,7 +519,8 @@ AS
|
||||
CREDENTIAL_NAME,
|
||||
FORMAT_TYPE,
|
||||
FILE_BASE_NAME,
|
||||
MAX_FILE_SIZE
|
||||
MAX_FILE_SIZE,
|
||||
JOB_CLASS
|
||||
INTO
|
||||
vYear,
|
||||
vMonth,
|
||||
@@ -533,7 +535,8 @@ AS
|
||||
vCredentialName,
|
||||
vFormat,
|
||||
vFileBaseName,
|
||||
vMaxFileSize
|
||||
vMaxFileSize,
|
||||
vJobClass
|
||||
FROM CT_MRDS.A_PARALLEL_EXPORT_CHUNKS
|
||||
WHERE CHUNK_ID = pStartId;
|
||||
|
||||
@@ -602,20 +605,17 @@ AS
|
||||
pKeyColumnName IN VARCHAR2,
|
||||
pBucketArea IN VARCHAR2,
|
||||
pFolderName IN VARCHAR2,
|
||||
pFileName IN VARCHAR2 default NULL,
|
||||
pTemplateTableName IN VARCHAR2 default NULL,
|
||||
pMaxFileSize IN NUMBER default 104857600,
|
||||
pRegisterExport IN BOOLEAN default FALSE,
|
||||
pProcessName IN VARCHAR2 default 'DATA_EXPORTER',
|
||||
pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
|
||||
)
|
||||
IS
|
||||
-- Type definition for key values
|
||||
TYPE key_value_tab IS TABLE OF VARCHAR2(4000);
|
||||
vKeyValues key_value_tab;
|
||||
vCount INTEGER;
|
||||
vSql VARCHAR2(4000);
|
||||
vKeyValue VARCHAR2(4000);
|
||||
vQuery VARCHAR2(32767);
|
||||
vUri VARCHAR2(4000);
|
||||
vDataType VARCHAR2(30);
|
||||
vTableName VARCHAR2(128);
|
||||
vSchemaName VARCHAR2(128);
|
||||
vKeyColumnName VARCHAR2(128);
|
||||
@@ -638,8 +638,11 @@ AS
|
||||
,'pKeyColumnName => '''||nvl(pKeyColumnName, 'NULL')||''''
|
||||
,'pBucketArea => '''||nvl(pBucketArea, 'NULL')||''''
|
||||
,'pFolderName => '''||nvl(pFolderName, 'NULL')||''''
|
||||
,'pFileName => '''||nvl(pFileName, 'NULL')||''''
|
||||
,'pTemplateTableName => '''||nvl(pTemplateTableName, 'NULL')||''''
|
||||
,'pMaxFileSize => '''||nvl(TO_CHAR(pMaxFileSize), 'NULL')||''''
|
||||
,'pRegisterExport => '''||CASE WHEN pRegisterExport THEN 'TRUE' ELSE 'FALSE' END||''''
|
||||
,'pProcessName => '''||nvl(pProcessName, 'NULL')||''''
|
||||
,'pCredentialName => '''||nvl(pCredentialName, 'NULL')||''''
|
||||
));
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT('Start','INFO', vParameters);
|
||||
@@ -671,16 +674,8 @@ AS
|
||||
|
||||
IF vCount = 0 THEN
|
||||
RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, ENV_MANAGER.MSG_COLUMN_NOT_EXISTS);
|
||||
|
||||
END IF;
|
||||
|
||||
-- Get the data type of the key column
|
||||
SELECT data_type INTO vDataType
|
||||
FROM all_tab_columns
|
||||
WHERE table_name = vTableName
|
||||
AND column_name = vKeyColumnName
|
||||
AND owner = vSchemaName;
|
||||
|
||||
-- Validate template table if provided
|
||||
IF pTemplateTableName IS NOT NULL THEN
|
||||
DECLARE
|
||||
@@ -760,183 +755,174 @@ AS
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT('File registration enabled with config key: ' || vConfigKey, 'INFO', vParameters);
|
||||
END IF;
|
||||
|
||||
-- Fetch unique key values from A_LOAD_HISTORY
|
||||
vSql := 'SELECT DISTINCT L.A_ETL_LOAD_SET_KEY' ||
|
||||
' FROM ' || vTableName || ' T, CT_ODS.A_LOAD_HISTORY L' ||
|
||||
' WHERE T.' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = L.A_ETL_LOAD_SET_KEY';
|
||||
-- Construct single query for entire table (no join with A_LOAD_HISTORY - ensures single file output)
|
||||
vQuery := 'SELECT ' || vProcessedColumnList ||
|
||||
' FROM ' || vTableName || ' T';
|
||||
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT('Executing key values query: ' || vSql, 'DEBUG', vParameters);
|
||||
EXECUTE IMMEDIATE vSql BULK COLLECT INTO vKeyValues;
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT('Found ' || vKeyValues.COUNT || ' unique key values to process', 'DEBUG', vParameters);
|
||||
-- Construct the URI for the file in OCI Object Storage
|
||||
vUri := vBucketUri ||
|
||||
CASE WHEN pFolderName IS NOT NULL THEN pFolderName || '/' ELSE '' END ||
|
||||
NVL(pFileName, UPPER(vTableName) || '.csv');
|
||||
|
||||
-- Loop over each unique key value
|
||||
FOR i IN 1 .. vKeyValues.COUNT LOOP
|
||||
vKeyValue := vKeyValues(i);
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT('Exporting to single file: ' || vUri, 'INFO', vParameters);
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT('Export query: ' || vQuery, 'DEBUG', vParameters);
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT('Max file size: ' || pMaxFileSize || ' bytes (' || ROUND(pMaxFileSize/1048576, 2) || ' MB)', 'DEBUG', vParameters);
|
||||
|
||||
-- Construct the query to extract data for the current key value with A_WORKFLOW_HISTORY_KEY mapping
|
||||
IF vDataType IN ('VARCHAR2', 'CHAR', 'NCHAR', 'NVARCHAR2') THEN
|
||||
vQuery := 'SELECT ' || vProcessedColumnList ||
|
||||
' FROM ' || vTableName || ' T, CT_ODS.A_LOAD_HISTORY L' ||
|
||||
' WHERE T.' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = L.A_ETL_LOAD_SET_KEY' ||
|
||||
' AND L.A_ETL_LOAD_SET_KEY = ' || CHR(39) || vKeyValue || CHR(39);
|
||||
ELSIF vDataType IN ('NUMBER', 'FLOAT', 'BINARY_FLOAT', 'BINARY_DOUBLE') THEN
|
||||
vQuery := 'SELECT ' || vProcessedColumnList ||
|
||||
' FROM ' || vTableName || ' T, CT_ODS.A_LOAD_HISTORY L' ||
|
||||
' WHERE T.' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = L.A_ETL_LOAD_SET_KEY' ||
|
||||
' AND L.A_ETL_LOAD_SET_KEY = ' || vKeyValue;
|
||||
ELSIF vDataType LIKE 'TIMESTAMP%' OR vDataType = 'DATE' THEN
|
||||
vQuery := 'SELECT ' || vProcessedColumnList ||
|
||||
' FROM ' || vTableName || ' T, CT_ODS.A_LOAD_HISTORY L' ||
|
||||
' WHERE T.' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = L.A_ETL_LOAD_SET_KEY' ||
|
||||
' AND L.A_ETL_LOAD_SET_KEY = TO_TIMESTAMP(' || CHR(39) || vKeyValue || CHR(39) ||', ''YYYY-MM-DD HH24:MI:SS.FF'')';
|
||||
ELSE
|
||||
RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_UNSUPPORTED_DATA_TYPE, ENV_MANAGER.MSG_UNSUPPORTED_DATA_TYPE);
|
||||
END IF;
|
||||
-- Use DBMS_CLOUD package to export data to the URI
|
||||
-- Oracle maxfilesize: min 10MB (10485760), max 1GB (1073741824), default 100MB (104857600)
|
||||
DBMS_CLOUD.EXPORT_DATA(
|
||||
credential_name => pCredentialName,
|
||||
file_uri_list => vUri,
|
||||
query => vQuery,
|
||||
format => json_object(
|
||||
'type' VALUE 'CSV',
|
||||
'header' VALUE true,
|
||||
'quote' VALUE CHR(34),
|
||||
'delimiter' VALUE ',',
|
||||
'escape' VALUE true,
|
||||
'recorddelimiter' VALUE CHR(13)||CHR(10), -- CRLF dla Windows
|
||||
'maxfilesize' VALUE pMaxFileSize -- Dynamic maxfilesize in bytes
|
||||
)
|
||||
);
|
||||
|
||||
-- Construct the URI for the file in OCI Object Storage
|
||||
vUri := vBucketUri ||
|
||||
CASE WHEN pFolderName IS NOT NULL THEN pFolderName || '/' ELSE '' END ||
|
||||
sanitizeFilename(vKeyValue) || '.csv';
|
||||
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT('Processing key value: ' || vKeyValue || ' (' || (i) || '/' || vKeyValues.COUNT || ')', 'DEBUG', vParameters);
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT('Export query: ' || vQuery, 'DEBUG', vParameters);
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT('Export URI: ' || vUri, 'DEBUG', vParameters);
|
||||
|
||||
-- Use DBMS_CLOUD package to export data to the URI
|
||||
DBMS_CLOUD.EXPORT_DATA(
|
||||
credential_name => pCredentialName,
|
||||
file_uri_list => vUri,
|
||||
query => vQuery,
|
||||
format => json_object('type' VALUE 'CSV', 'header' VALUE true)
|
||||
);
|
||||
|
||||
-- Register exported file to A_SOURCE_FILE_RECEIVED if requested
|
||||
IF pRegisterExport THEN
|
||||
DECLARE
|
||||
vChecksum VARCHAR2(128);
|
||||
vCreated TIMESTAMP WITH TIME ZONE;
|
||||
vBytes NUMBER;
|
||||
vActualFileName VARCHAR2(1000); -- Actual filename with Oracle suffix
|
||||
vSanitizedFileName VARCHAR2(1000);
|
||||
vFileName VARCHAR2(1000);
|
||||
vRetryCount NUMBER := 0;
|
||||
vMaxRetries NUMBER := 1; -- One retry after initial attempt
|
||||
vRetryDelay NUMBER := 2; -- 2 seconds delay
|
||||
BEGIN
|
||||
-- Extract filename from URI (after last '/')
|
||||
vFileName := SUBSTR(vUri, INSTR(vUri, '/', -1) + 1);
|
||||
|
||||
-- Sanitize filename first (PL/SQL function cannot be used directly in SQL)
|
||||
vSanitizedFileName := sanitizeFilename(vFileName);
|
||||
|
||||
-- Remove .csv extension for LIKE pattern matching (Oracle adds suffixes BEFORE .csv)
|
||||
-- Example: keyvalue.csv becomes keyvalue_1_20260211T102621591769Z.csv
|
||||
vSanitizedFileName := REGEXP_REPLACE(vSanitizedFileName, '\.csv$', '', 1, 0, 'i');
|
||||
|
||||
-- Try to get file metadata with retry logic
|
||||
<<metadata_retry_loop>>
|
||||
LOOP
|
||||
BEGIN
|
||||
SELECT object_name, checksum, created, bytes
|
||||
INTO vActualFileName, vChecksum, vCreated, vBytes
|
||||
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
||||
credential_name => pCredentialName,
|
||||
location_uri => vBucketUri
|
||||
))
|
||||
WHERE object_name LIKE CASE WHEN pFolderName IS NOT NULL THEN pFolderName || '/' ELSE '' END || vSanitizedFileName || '%'
|
||||
ORDER BY created DESC, bytes DESC
|
||||
FETCH FIRST 1 ROW ONLY;
|
||||
|
||||
-- Extract filename only from full path (remove bucket folder prefix)
|
||||
vActualFileName := SUBSTR(vActualFileName, INSTR(vActualFileName, '/', -1) + 1);
|
||||
|
||||
-- Success - exit retry loop
|
||||
EXIT metadata_retry_loop;
|
||||
|
||||
EXCEPTION
|
||||
WHEN NO_DATA_FOUND THEN
|
||||
vRetryCount := vRetryCount + 1;
|
||||
|
||||
IF vRetryCount <= vMaxRetries THEN
|
||||
-- Log retry attempt
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT('File not found in bucket (attempt ' || vRetryCount || '/' || (vMaxRetries + 1) || '), retrying after ' || vRetryDelay || ' seconds: ' || vFileName, 'DEBUG', vParameters);
|
||||
|
||||
-- Wait before retry using DBMS_SESSION.SLEEP (alternative to DBMS_LOCK)
|
||||
DBMS_SESSION.SLEEP(vRetryDelay);
|
||||
ELSE
|
||||
-- Max retries exceeded - re-raise exception
|
||||
RAISE;
|
||||
END IF;
|
||||
END;
|
||||
END LOOP metadata_retry_loop;
|
||||
|
||||
-- Create A_SOURCE_FILE_RECEIVED record for this export with metadata
|
||||
vSourceFileReceivedKey := CT_MRDS.A_SOURCE_FILE_RECEIVED_KEY_SEQ.NEXTVAL;
|
||||
INSERT INTO CT_MRDS.A_SOURCE_FILE_RECEIVED (
|
||||
A_SOURCE_FILE_RECEIVED_KEY,
|
||||
A_SOURCE_FILE_CONFIG_KEY,
|
||||
SOURCE_FILE_NAME,
|
||||
CHECKSUM,
|
||||
CREATED,
|
||||
BYTES,
|
||||
RECEPTION_DATE,
|
||||
PROCESSING_STATUS,
|
||||
PARTITION_YEAR,
|
||||
PARTITION_MONTH,
|
||||
ARCH_FILE_NAME
|
||||
) VALUES (
|
||||
vSourceFileReceivedKey,
|
||||
NVL(vConfigKey, -1), -- Use config key if found, otherwise -1
|
||||
vActualFileName, -- Use actual filename with Oracle suffix
|
||||
vChecksum,
|
||||
vCreated,
|
||||
vBytes,
|
||||
SYSDATE,
|
||||
'INGESTED',
|
||||
NULL, -- PARTITION_YEAR not used for single-file exports
|
||||
NULL, -- PARTITION_MONTH not used for single-file exports
|
||||
NULL -- ARCH_FILE_NAME not used for single-file exports
|
||||
);
|
||||
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT('Registered file: FileReceivedKey=' || vSourceFileReceivedKey || ', File=' || vActualFileName || ', Size=' || vBytes || ' bytes', 'DEBUG', vParameters);
|
||||
EXCEPTION
|
||||
WHEN NO_DATA_FOUND THEN
|
||||
-- File not found after retries - log warning and continue without metadata
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT('WARNING: File not found in bucket after ' || (vMaxRetries + 1) || ' attempts: ' || vFileName, 'WARNING', vParameters);
|
||||
|
||||
-- Sanitize filename for fallback INSERT (function cannot be used in SQL)
|
||||
vSanitizedFileName := sanitizeFilename(vFileName);
|
||||
|
||||
-- Insert without metadata using theoretical filename
|
||||
vSourceFileReceivedKey := CT_MRDS.A_SOURCE_FILE_RECEIVED_KEY_SEQ.NEXTVAL;
|
||||
INSERT INTO CT_MRDS.A_SOURCE_FILE_RECEIVED (
|
||||
A_SOURCE_FILE_RECEIVED_KEY,
|
||||
A_SOURCE_FILE_CONFIG_KEY,
|
||||
SOURCE_FILE_NAME,
|
||||
RECEPTION_DATE,
|
||||
PROCESSING_STATUS,
|
||||
PARTITION_YEAR,
|
||||
PARTITION_MONTH,
|
||||
ARCH_FILE_NAME
|
||||
) VALUES (
|
||||
vSourceFileReceivedKey,
|
||||
NVL(vConfigKey, -1), -- Use config key if found, otherwise -1
|
||||
vSanitizedFileName, -- Use pre-calculated sanitized filename
|
||||
SYSDATE,
|
||||
'INGESTED',
|
||||
NULL, -- PARTITION_YEAR not used for single-file exports
|
||||
NULL, -- PARTITION_MONTH not used for single-file exports
|
||||
NULL -- ARCH_FILE_NAME not used for single-file exports
|
||||
);
|
||||
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT('Registered file without metadata: FileReceivedKey=' || vSourceFileReceivedKey || ', File=' || vSanitizedFileName, 'DEBUG', vParameters);
|
||||
END;
|
||||
END IF;
|
||||
END LOOP;
|
||||
|
||||
-- Log summary of file registration if enabled
|
||||
-- Register exported file to A_SOURCE_FILE_RECEIVED if requested
|
||||
IF pRegisterExport THEN
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT('Registered ' || vKeyValues.COUNT || ' exported files to A_SOURCE_FILE_RECEIVED with config key: ' || vConfigKey, 'INFO', vParameters);
|
||||
DECLARE
|
||||
vActualFileName VARCHAR2(1000); -- Actual filename with Oracle suffix
|
||||
vSanitizedFileName VARCHAR2(1000);
|
||||
vFileName VARCHAR2(1000);
|
||||
vRetryCount NUMBER := 0;
|
||||
vMaxRetries NUMBER := 1; -- One retry after initial attempt
|
||||
vRetryDelay NUMBER := 2; -- 2 seconds delay
|
||||
vFilesFound NUMBER := 0;
|
||||
vTotalBytes NUMBER := 0;
|
||||
BEGIN
|
||||
-- Extract filename from URI (after last '/')
|
||||
vFileName := SUBSTR(vUri, INSTR(vUri, '/', -1) + 1);
|
||||
|
||||
-- Sanitize filename first (PL/SQL function cannot be used directly in SQL)
|
||||
vSanitizedFileName := sanitizeFilename(vFileName);
|
||||
|
||||
-- Remove .csv extension for LIKE pattern matching (Oracle adds suffixes BEFORE .csv)
|
||||
-- Example: tablename.csv becomes tablename_1_20260211T102621591769Z.csv
|
||||
vSanitizedFileName := REGEXP_REPLACE(vSanitizedFileName, '\.csv$', '', 1, 0, 'i');
|
||||
|
||||
-- Try to get ALL exported files with retry logic
|
||||
-- Oracle DBMS_CLOUD.EXPORT_DATA can create MULTIPLE files due to:
|
||||
-- 1. maxfilesize parameter (splits files larger than limit)
|
||||
-- 2. Automatic parallel processing (especially on large production instances)
|
||||
-- We must register ALL files, not just the first one
|
||||
<<metadata_retry_loop>>
|
||||
LOOP
|
||||
BEGIN
|
||||
-- Register ALL files matching the pattern (cursor loop)
|
||||
FOR rec IN (
|
||||
SELECT object_name, checksum, created, bytes
|
||||
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
||||
credential_name => pCredentialName,
|
||||
location_uri => vBucketUri
|
||||
))
|
||||
WHERE object_name LIKE CASE WHEN pFolderName IS NOT NULL THEN pFolderName || '/' ELSE '' END || vSanitizedFileName || '%'
|
||||
ORDER BY created DESC, bytes DESC
|
||||
) LOOP
|
||||
-- Extract filename only from full path (remove bucket folder prefix)
|
||||
vActualFileName := SUBSTR(rec.object_name, INSTR(rec.object_name, '/', -1) + 1);
|
||||
|
||||
-- Create A_SOURCE_FILE_RECEIVED record for EACH exported file
|
||||
vSourceFileReceivedKey := CT_MRDS.A_SOURCE_FILE_RECEIVED_KEY_SEQ.NEXTVAL;
|
||||
INSERT INTO CT_MRDS.A_SOURCE_FILE_RECEIVED (
|
||||
A_SOURCE_FILE_RECEIVED_KEY,
|
||||
A_SOURCE_FILE_CONFIG_KEY,
|
||||
SOURCE_FILE_NAME,
|
||||
CHECKSUM,
|
||||
CREATED,
|
||||
BYTES,
|
||||
RECEPTION_DATE,
|
||||
PROCESSING_STATUS,
|
||||
PARTITION_YEAR,
|
||||
PARTITION_MONTH,
|
||||
ARCH_PATH,
|
||||
PROCESS_NAME
|
||||
) VALUES (
|
||||
vSourceFileReceivedKey,
|
||||
NVL(vConfigKey, -1), -- Use config key if found, otherwise -1
|
||||
vActualFileName, -- Use actual filename with Oracle suffix
|
||||
rec.checksum,
|
||||
rec.created,
|
||||
rec.bytes,
|
||||
SYSDATE,
|
||||
'INGESTED',
|
||||
NULL, -- PARTITION_YEAR not used for single-file exports
|
||||
NULL, -- PARTITION_MONTH not used for single-file exports
|
||||
NULL, -- ARCH_PATH not used for single-file exports
|
||||
pProcessName -- Process name from parameter
|
||||
);
|
||||
|
||||
vFilesFound := vFilesFound + 1;
|
||||
vTotalBytes := vTotalBytes + rec.bytes;
|
||||
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT('Registered file ' || vFilesFound || ': FileReceivedKey=' || vSourceFileReceivedKey || ', File=' || vActualFileName || ', Size=' || rec.bytes || ' bytes', 'INFO', vParameters);
|
||||
END LOOP;
|
||||
|
||||
-- Check if any files were found
|
||||
IF vFilesFound = 0 THEN
|
||||
RAISE NO_DATA_FOUND;
|
||||
END IF;
|
||||
|
||||
-- Success - exit retry loop
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT('Total registered: ' || vFilesFound || ' file(s), Total size: ' || vTotalBytes || ' bytes (' || ROUND(vTotalBytes/1048576, 2) || ' MB)', 'INFO', vParameters);
|
||||
EXIT metadata_retry_loop;
|
||||
|
||||
EXCEPTION
|
||||
WHEN NO_DATA_FOUND THEN
|
||||
vRetryCount := vRetryCount + 1;
|
||||
|
||||
IF vRetryCount <= vMaxRetries THEN
|
||||
-- Log retry attempt
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT('File(s) not found in bucket (attempt ' || vRetryCount || '/' || (vMaxRetries + 1) || '), retrying after ' || vRetryDelay || ' seconds: ' || vFileName, 'DEBUG', vParameters);
|
||||
|
||||
-- Wait before retry using DBMS_SESSION.SLEEP (alternative to DBMS_LOCK)
|
||||
DBMS_SESSION.SLEEP(vRetryDelay);
|
||||
ELSE
|
||||
-- Max retries exceeded - re-raise exception
|
||||
RAISE;
|
||||
END IF;
|
||||
END;
|
||||
END LOOP metadata_retry_loop;
|
||||
EXCEPTION
|
||||
WHEN NO_DATA_FOUND THEN
|
||||
-- File not found after retries - log warning and continue without metadata
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT('WARNING: File not found in bucket after ' || (vMaxRetries + 1) || ' attempts: ' || vFileName, 'WARNING', vParameters);
|
||||
|
||||
-- Sanitize filename for fallback INSERT (function cannot be used in SQL)
|
||||
vSanitizedFileName := sanitizeFilename(vFileName);
|
||||
|
||||
-- Insert without metadata using theoretical filename
|
||||
vSourceFileReceivedKey := CT_MRDS.A_SOURCE_FILE_RECEIVED_KEY_SEQ.NEXTVAL;
|
||||
INSERT INTO CT_MRDS.A_SOURCE_FILE_RECEIVED (
|
||||
A_SOURCE_FILE_RECEIVED_KEY,
|
||||
A_SOURCE_FILE_CONFIG_KEY,
|
||||
SOURCE_FILE_NAME,
|
||||
RECEPTION_DATE,
|
||||
PROCESSING_STATUS,
|
||||
PARTITION_YEAR,
|
||||
PARTITION_MONTH,
|
||||
ARCH_PATH,
|
||||
PROCESS_NAME
|
||||
) VALUES (
|
||||
vSourceFileReceivedKey,
|
||||
NVL(vConfigKey, -1), -- Use config key if found, otherwise -1
|
||||
vSanitizedFileName, -- Use pre-calculated sanitized filename
|
||||
SYSDATE,
|
||||
'INGESTED',
|
||||
NULL, -- PARTITION_YEAR not used for single-file exports
|
||||
NULL, -- PARTITION_MONTH not used for single-file exports
|
||||
NULL, -- ARCH_PATH not used for single-file exports
|
||||
pProcessName -- Process name from parameter
|
||||
);
|
||||
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT('Registered file without metadata: FileReceivedKey=' || vSourceFileReceivedKey || ', File=' || vSanitizedFileName, 'INFO', vParameters);
|
||||
END;
|
||||
END IF;
|
||||
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT('End','INFO',vParameters);
|
||||
@@ -949,10 +935,6 @@ AS
|
||||
vgMsgTmp := ENV_MANAGER.MSG_COLUMN_NOT_EXISTS || ' (TableName.ColumnName): ' || vTableName||'.'||vKeyColumnName||CASE WHEN vCurrentCol IS NOT NULL THEN '.'||vCurrentCol||' in column list' ELSE '' END;
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
|
||||
RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, vgMsgTmp);
|
||||
WHEN ENV_MANAGER.ERR_UNSUPPORTED_DATA_TYPE THEN
|
||||
vgMsgTmp := ENV_MANAGER.MSG_UNSUPPORTED_DATA_TYPE || ' vDataType: '||vDataType;
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
|
||||
RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_UNSUPPORTED_DATA_TYPE, vgMsgTmp);
|
||||
WHEN OTHERS THEN
|
||||
-- Log complete error details including full stack trace and backtrace
|
||||
ENV_MANAGER.LOG_PROCESS_ERROR('Export failed: ' || SQLERRM, vParameters, 'DATA_EXPORTER');
|
||||
@@ -974,6 +956,7 @@ AS
|
||||
pMaxDate IN DATE default SYSDATE,
|
||||
pParallelDegree IN NUMBER default 1,
|
||||
pTemplateTableName IN VARCHAR2 default NULL,
|
||||
pJobClass IN VARCHAR2 default NULL,
|
||||
pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
|
||||
)
|
||||
IS
|
||||
@@ -997,6 +980,7 @@ AS
|
||||
,'pMaxDate => '''||nvl(TO_CHAR(pMaxDate, 'YYYY-MM-DD HH24:MI:SS'), 'NULL')||''''
|
||||
,'pParallelDegree => '''||nvl(TO_CHAR(pParallelDegree), 'NULL')||''''
|
||||
,'pTemplateTableName => '''||nvl(pTemplateTableName, 'NULL')||''''
|
||||
,'pJobClass => '''||nvl(pJobClass, 'NULL')||''''
|
||||
,'pCredentialName => '''||nvl(pCredentialName, 'NULL')||''''
|
||||
));
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT('Start','INFO', vParameters);
|
||||
@@ -1069,21 +1053,6 @@ AS
|
||||
BEGIN
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT('Using parallel processing with ' || pParallelDegree || ' threads', 'INFO', vParameters);
|
||||
|
||||
-- Clean up old completed chunks (>24 hours) to prevent table bloat
|
||||
-- CRITICAL: Do NOT delete chunks from other active sessions (same-day tasks)
|
||||
-- This prevents race conditions when multiple exports run simultaneously
|
||||
DELETE FROM CT_MRDS.A_PARALLEL_EXPORT_CHUNKS
|
||||
WHERE STATUS = 'COMPLETED'
|
||||
AND CREATED_DATE < SYSTIMESTAMP - INTERVAL '1' DAY;
|
||||
COMMIT;
|
||||
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT('Cleared old COMPLETED chunks (>24h). Active session chunks preserved.', 'DEBUG', vParameters);
|
||||
-- This prevents re-exporting successfully completed partitions
|
||||
DELETE FROM CT_MRDS.A_PARALLEL_EXPORT_CHUNKS WHERE STATUS = 'COMPLETED';
|
||||
COMMIT;
|
||||
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT('Cleared COMPLETED chunks. FAILED chunks retained for retry.', 'DEBUG', vParameters);
|
||||
|
||||
-- Populate chunks table (insert new chunks, preserve FAILED chunks for retry)
|
||||
FOR i IN 1 .. vPartitions.COUNT LOOP
|
||||
MERGE INTO CT_MRDS.A_PARALLEL_EXPORT_CHUNKS t
|
||||
@@ -1092,10 +1061,10 @@ AS
|
||||
WHEN NOT MATCHED THEN
|
||||
INSERT (CHUNK_ID, TASK_NAME, YEAR_VALUE, MONTH_VALUE, SCHEMA_NAME, TABLE_NAME, KEY_COLUMN_NAME,
|
||||
BUCKET_URI, FOLDER_NAME, PROCESSED_COLUMNS, MIN_DATE, MAX_DATE,
|
||||
CREDENTIAL_NAME, FORMAT_TYPE, FILE_BASE_NAME, TEMPLATE_TABLE_NAME, MAX_FILE_SIZE, STATUS)
|
||||
CREDENTIAL_NAME, FORMAT_TYPE, FILE_BASE_NAME, TEMPLATE_TABLE_NAME, MAX_FILE_SIZE, JOB_CLASS, STATUS)
|
||||
VALUES (i, vTaskName, vPartitions(i).year, vPartitions(i).month, vSchemaName, vTableName, vKeyColumnName,
|
||||
vBucketUri, pFolderName, vProcessedColumnList, pMinDate, pMaxDate,
|
||||
pCredentialName, 'PARQUET', NULL, pTemplateTableName, 104857600, 'PENDING')
|
||||
pCredentialName, 'PARQUET', NULL, pTemplateTableName, 104857600, pJobClass, 'PENDING')
|
||||
WHEN MATCHED THEN
|
||||
UPDATE SET TASK_NAME = vTaskName,
|
||||
STATUS = CASE WHEN t.STATUS = 'FAILED' THEN 'PENDING' ELSE t.STATUS END,
|
||||
@@ -1127,14 +1096,24 @@ AS
|
||||
);
|
||||
|
||||
-- Execute task in parallel
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT('Executing parallel task: ' || vTaskName, 'DEBUG', vParameters);
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT('Executing parallel task: ' || vTaskName || CASE WHEN pJobClass IS NOT NULL THEN ' with job class: ' || pJobClass ELSE '' END, 'DEBUG', vParameters);
|
||||
|
||||
DBMS_PARALLEL_EXECUTE.RUN_TASK(
|
||||
task_name => vTaskName,
|
||||
sql_stmt => 'BEGIN CT_MRDS.DATA_EXPORTER.EXPORT_PARTITION_PARALLEL(:start_id, :end_id); END;',
|
||||
language_flag => DBMS_SQL.NATIVE,
|
||||
parallel_level => pParallelDegree
|
||||
);
|
||||
IF pJobClass IS NOT NULL THEN
|
||||
DBMS_PARALLEL_EXECUTE.RUN_TASK(
|
||||
task_name => vTaskName,
|
||||
sql_stmt => 'BEGIN CT_MRDS.DATA_EXPORTER.EXPORT_PARTITION_PARALLEL(:start_id, :end_id); END;',
|
||||
language_flag => DBMS_SQL.NATIVE,
|
||||
parallel_level => pParallelDegree,
|
||||
job_class => pJobClass
|
||||
);
|
||||
ELSE
|
||||
DBMS_PARALLEL_EXECUTE.RUN_TASK(
|
||||
task_name => vTaskName,
|
||||
sql_stmt => 'BEGIN CT_MRDS.DATA_EXPORTER.EXPORT_PARTITION_PARALLEL(:start_id, :end_id); END;',
|
||||
language_flag => DBMS_SQL.NATIVE,
|
||||
parallel_level => pParallelDegree
|
||||
);
|
||||
END IF;
|
||||
|
||||
-- Check for errors
|
||||
DECLARE
|
||||
@@ -1238,6 +1217,8 @@ AS
|
||||
pTemplateTableName IN VARCHAR2 default NULL,
|
||||
pMaxFileSize IN NUMBER default 104857600,
|
||||
pRegisterExport IN BOOLEAN default FALSE,
|
||||
pProcessName IN VARCHAR2 default 'DATA_EXPORTER',
|
||||
pJobClass IN VARCHAR2 default NULL,
|
||||
pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
|
||||
)
|
||||
IS
|
||||
@@ -1275,6 +1256,7 @@ AS
|
||||
,'pTemplateTableName => '''||nvl(pTemplateTableName, 'NULL')||''''
|
||||
,'pMaxFileSize => '''||nvl(TO_CHAR(pMaxFileSize), 'NULL')||''''
|
||||
,'pRegisterExport => '''||CASE WHEN pRegisterExport THEN 'TRUE' ELSE 'FALSE' END||''''
|
||||
,'pJobClass => '''||nvl(pJobClass, 'NULL')||''''
|
||||
,'pCredentialName => '''||nvl(pCredentialName, 'NULL')||''''
|
||||
));
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT('Start','INFO', vParameters);
|
||||
@@ -1383,10 +1365,10 @@ AS
|
||||
WHEN NOT MATCHED THEN
|
||||
INSERT (CHUNK_ID, TASK_NAME, YEAR_VALUE, MONTH_VALUE, SCHEMA_NAME, TABLE_NAME, KEY_COLUMN_NAME,
|
||||
BUCKET_URI, FOLDER_NAME, PROCESSED_COLUMNS, MIN_DATE, MAX_DATE,
|
||||
CREDENTIAL_NAME, FORMAT_TYPE, FILE_BASE_NAME, TEMPLATE_TABLE_NAME, MAX_FILE_SIZE, STATUS)
|
||||
CREDENTIAL_NAME, FORMAT_TYPE, FILE_BASE_NAME, TEMPLATE_TABLE_NAME, MAX_FILE_SIZE, JOB_CLASS, STATUS)
|
||||
VALUES (i, vTaskName, vPartitions(i).year, vPartitions(i).month, vSchemaName, vTableName, vKeyColumnName,
|
||||
vBucketUri, pFolderName, vProcessedColumnList, pMinDate, pMaxDate,
|
||||
pCredentialName, 'CSV', vFileBaseName, pTemplateTableName, pMaxFileSize, 'PENDING')
|
||||
pCredentialName, 'CSV', vFileBaseName, pTemplateTableName, pMaxFileSize, pJobClass, 'PENDING')
|
||||
WHEN MATCHED THEN
|
||||
UPDATE SET TASK_NAME = vTaskName,
|
||||
STATUS = CASE WHEN t.STATUS = 'FAILED' THEN 'PENDING' ELSE t.STATUS END,
|
||||
@@ -1418,14 +1400,24 @@ AS
|
||||
);
|
||||
|
||||
-- Execute task in parallel
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT('Executing parallel CSV export task: ' || vTaskName, 'DEBUG', vParameters);
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT('Executing parallel CSV export task: ' || vTaskName || CASE WHEN pJobClass IS NOT NULL THEN ' with job class: ' || pJobClass ELSE '' END, 'DEBUG', vParameters);
|
||||
|
||||
DBMS_PARALLEL_EXECUTE.RUN_TASK(
|
||||
task_name => vTaskName,
|
||||
sql_stmt => 'BEGIN CT_MRDS.DATA_EXPORTER.EXPORT_PARTITION_PARALLEL(:start_id, :end_id); END;',
|
||||
language_flag => DBMS_SQL.NATIVE,
|
||||
parallel_level => pParallelDegree
|
||||
);
|
||||
IF pJobClass IS NOT NULL THEN
|
||||
DBMS_PARALLEL_EXECUTE.RUN_TASK(
|
||||
task_name => vTaskName,
|
||||
sql_stmt => 'BEGIN CT_MRDS.DATA_EXPORTER.EXPORT_PARTITION_PARALLEL(:start_id, :end_id); END;',
|
||||
language_flag => DBMS_SQL.NATIVE,
|
||||
parallel_level => pParallelDegree,
|
||||
job_class => pJobClass
|
||||
);
|
||||
ELSE
|
||||
DBMS_PARALLEL_EXECUTE.RUN_TASK(
|
||||
task_name => vTaskName,
|
||||
sql_stmt => 'BEGIN CT_MRDS.DATA_EXPORTER.EXPORT_PARTITION_PARALLEL(:start_id, :end_id); END;',
|
||||
language_flag => DBMS_SQL.NATIVE,
|
||||
parallel_level => pParallelDegree
|
||||
);
|
||||
END IF;
|
||||
|
||||
-- Check for errors
|
||||
DECLARE
|
||||
@@ -1584,7 +1576,8 @@ AS
|
||||
PROCESSING_STATUS,
|
||||
PARTITION_YEAR,
|
||||
PARTITION_MONTH,
|
||||
ARCH_FILE_NAME
|
||||
ARCH_PATH,
|
||||
PROCESS_NAME
|
||||
) VALUES (
|
||||
vSourceFileReceivedKey,
|
||||
vConfigKey, -- Config key from A_SOURCE_FILE_CONFIG lookup
|
||||
@@ -1596,7 +1589,8 @@ AS
|
||||
'INGESTED',
|
||||
NULL, -- PARTITION_YEAR not used for CSV exports
|
||||
NULL, -- PARTITION_MONTH not used for CSV exports
|
||||
NULL -- ARCH_FILE_NAME not used for CSV exports
|
||||
NULL, -- ARCH_PATH not used for CSV exports
|
||||
pProcessName -- Process name from parameter
|
||||
);
|
||||
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT('Registered file: FileReceivedKey=' || vSourceFileReceivedKey || ', File=' || vActualFileName || ', Size=' || vBytes || ' bytes', 'DEBUG', vParameters);
|
||||
@@ -1618,7 +1612,8 @@ AS
|
||||
PROCESSING_STATUS,
|
||||
PARTITION_YEAR,
|
||||
PARTITION_MONTH,
|
||||
ARCH_FILE_NAME
|
||||
ARCH_PATH,
|
||||
PROCESS_NAME
|
||||
) VALUES (
|
||||
vSourceFileReceivedKey,
|
||||
vConfigKey, -- Config key from A_SOURCE_FILE_CONFIG lookup
|
||||
@@ -1627,7 +1622,8 @@ AS
|
||||
'INGESTED',
|
||||
NULL, -- PARTITION_YEAR not used for CSV exports
|
||||
NULL, -- PARTITION_MONTH not used for CSV exports
|
||||
NULL -- ARCH_FILE_NAME not used for CSV exports
|
||||
NULL, -- ARCH_PATH not used for CSV exports
|
||||
pProcessName -- Process name from parameter
|
||||
);
|
||||
END;
|
||||
END LOOP;
|
||||
|
||||
@@ -9,21 +9,17 @@ AS
|
||||
**/
|
||||
|
||||
-- Package Version Information
|
||||
PACKAGE_VERSION CONSTANT VARCHAR2(10) := '2.7.5';
|
||||
PACKAGE_BUILD_DATE CONSTANT VARCHAR2(20) := '2026-02-11 12:15:00';
|
||||
PACKAGE_VERSION CONSTANT VARCHAR2(10) := '2.11.0';
|
||||
PACKAGE_BUILD_DATE CONSTANT VARCHAR2(20) := '2026-02-18 10:00:00';
|
||||
PACKAGE_AUTHOR CONSTANT VARCHAR2(100) := 'Grzegorz Michalski';
|
||||
|
||||
-- Version History (last 3-5 changes)
|
||||
VERSION_HISTORY CONSTANT VARCHAR2(4000) :=
|
||||
'v2.7.5 (2026-02-11): Added pRegisterExport parameter to EXPORT_TABLE_DATA procedure. When TRUE, registers each exported CSV file in A_SOURCE_FILE_RECEIVED.' || CHR(10) ||
|
||||
'v2.7.4 (2026-02-11): ACTUAL FILENAME STORAGE - Store real filename with Oracle suffix in SOURCE_FILE_NAME instead of theoretical filename.' || CHR(10) ||
|
||||
'v2.7.3 (2026-02-11): FIX LIKE pattern for DBMS_CLOUD.LIST_OBJECTS - Removed .csv extension from filename before pattern matching.' || CHR(10) ||
|
||||
'v2.7.2 (2026-02-11): FIX pRegisterExport in EXPORT_TABLE_DATA_TO_CSV_BY_DATE - Added missing pRegisterExport parameter to EXPORT_SINGLE_PARTITION call.' || CHR(10) ||
|
||||
'v2.7.1 (2026-02-11): AUTO-LOOKUP A_SOURCE_FILE_CONFIG_KEY - Parse pFolderName to automatically find config key from A_SOURCE_FILE_CONFIG.' || CHR(10) ||
|
||||
'v2.7.0 (2026-02-10): Added pRegisterExport parameter to EXPORT_TABLE_DATA_TO_CSV_BY_DATE. When TRUE, registers each exported CSV file in A_SOURCE_FILE_RECEIVED.' || CHR(10) ||
|
||||
'v2.6.3 (2026-01-28): COMPILATION FIX - Resolved ORA-00904 error in EXPORT_PARTITION_PARALLEL. SQLERRM properly assigned to vgMsgTmp variable.' || CHR(10) ||
|
||||
'v2.6.2 (2026-01-28): CRITICAL FIX - Race condition when multiple exports run simultaneously. Session-safe cleanup with TASK_NAME filtering.' || CHR(10) ||
|
||||
'v2.6.0 (2026-01-28): CRITICAL FIX - Added STATUS tracking to A_PARALLEL_EXPORT_CHUNKS table to prevent data duplication on retry.' || CHR(10);
|
||||
'v2.11.0 (2026-02-18): Added pJobClass parameter to EXPORT_TABLE_DATA_BY_DATE and EXPORT_TABLE_DATA_TO_CSV_BY_DATE for Oracle Scheduler job class support (resource/priority management).' || CHR(10) ||
|
||||
'v2.10.1 (2026-02-17): CRITICAL FIX - Remove redundant COMPLETED chunks deletion before parallel export that caused ORA-01403 errors (phantom chunks created by CREATE_CHUNKS_BY_NUMBER_COL).' || CHR(10) ||
|
||||
'v2.10.0 (2026-02-13): CRITICAL FIX - Register ALL files created by DBMS_CLOUD.EXPORT_DATA (multi-file support due to Oracle parallel processing on large instances). Prevents orphaned files in rollback.' || CHR(10) ||
|
||||
'v2.9.0 (2026-02-13): Added pProcessName parameter to EXPORT_TABLE_DATA and EXPORT_TABLE_DATA_TO_CSV_BY_DATE procedures for process tracking in A_SOURCE_FILE_RECEIVED table.' || CHR(10) ||
|
||||
'v2.8.1 (2026-02-12): FIX query in EXPORT_TABLE_DATA - removed A_LOAD_HISTORY join to ensure single file output (simple SELECT).' || CHR(10);
|
||||
|
||||
cgBL CONSTANT VARCHAR2(2) := CHR(13)||CHR(10);
|
||||
vgMsgTmp VARCHAR2(32000);
|
||||
@@ -71,16 +67,19 @@ AS
|
||||
/**
|
||||
* @name EXPORT_TABLE_DATA
|
||||
* @desc Wrapper procedure for DBMS_CLOUD.EXPORT_DATA.
|
||||
* Exports data into CSV file on OCI infrustructure.
|
||||
* Exports data into single CSV file on OCI infrastructure.
|
||||
* pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
|
||||
* Supports template table for column order and per-column date formatting.
|
||||
* When pRegisterExport=TRUE, successfully exported files are registered in:
|
||||
* When pRegisterExport=TRUE, successfully exported file is registered in:
|
||||
* - CT_MRDS.A_SOURCE_FILE_RECEIVED (tracks file location, size, checksum, and metadata)
|
||||
* @param pFileName - Optional filename (e.g., 'export.csv'). NULL = auto-generate from table name
|
||||
* @param pTemplateTableName - Optional template table (SCHEMA.TABLE or TABLE) for:
|
||||
* - Column order control (template defines CSV structure)
|
||||
* - Per-column date formatting via FILE_MANAGER.GET_DATE_FORMAT
|
||||
* - NULL = use source table columns in natural order
|
||||
* @param pRegisterExport - When TRUE, registers each exported CSV file in A_SOURCE_FILE_RECEIVED table
|
||||
* @param pMaxFileSize - Maximum file size in bytes (default 104857600 = 100MB, min 10MB, max 1GB)
|
||||
* @param pRegisterExport - When TRUE, registers exported CSV file in A_SOURCE_FILE_RECEIVED table
|
||||
* @param pProcessName - Process name stored in PROCESS_NAME column (default 'DATA_EXPORTER')
|
||||
* @example
|
||||
* begin
|
||||
* DATA_EXPORTER.EXPORT_TABLE_DATA(
|
||||
@@ -89,7 +88,9 @@ AS
|
||||
* pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
||||
* pBucketArea => 'DATA',
|
||||
* pFolderName => 'csv_exports',
|
||||
* pFileName => 'my_export.csv', -- Optional
|
||||
* pTemplateTableName => 'CT_ET_TEMPLATES.MY_TEMPLATE', -- Optional
|
||||
* pMaxFileSize => 104857600, -- Optional, default 100MB
|
||||
* pRegisterExport => TRUE -- Optional, default FALSE
|
||||
* );
|
||||
* end;
|
||||
@@ -100,8 +101,11 @@ AS
|
||||
pKeyColumnName IN VARCHAR2,
|
||||
pBucketArea IN VARCHAR2,
|
||||
pFolderName IN VARCHAR2,
|
||||
pFileName IN VARCHAR2 default NULL,
|
||||
pTemplateTableName IN VARCHAR2 default NULL,
|
||||
pMaxFileSize IN NUMBER default 104857600,
|
||||
pRegisterExport IN BOOLEAN default FALSE,
|
||||
pProcessName IN VARCHAR2 default 'DATA_EXPORTER',
|
||||
pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
|
||||
);
|
||||
|
||||
@@ -143,6 +147,7 @@ AS
|
||||
pMaxDate IN DATE default SYSDATE,
|
||||
pParallelDegree IN NUMBER default 1,
|
||||
pTemplateTableName IN VARCHAR2 default NULL,
|
||||
pJobClass IN VARCHAR2 default NULL,
|
||||
pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
|
||||
);
|
||||
|
||||
@@ -158,6 +163,7 @@ AS
|
||||
* File naming pattern: {pFileName}_YYYYMM.csv or {TABLENAME}_YYYYMM.csv (if pFileName is NULL)
|
||||
* When pRegisterExport=TRUE, successfully exported files are registered in:
|
||||
* - CT_MRDS.A_SOURCE_FILE_RECEIVED (tracks file location, size, checksum, and metadata)
|
||||
* @param pProcessName - Process name stored in PROCESS_NAME column (default 'DATA_EXPORTER')
|
||||
* @example
|
||||
* begin
|
||||
* -- With custom filename
|
||||
@@ -203,6 +209,8 @@ AS
|
||||
pTemplateTableName IN VARCHAR2 default NULL,
|
||||
pMaxFileSize IN NUMBER default 104857600,
|
||||
pRegisterExport IN BOOLEAN default FALSE,
|
||||
pProcessName IN VARCHAR2 default 'DATA_EXPORTER',
|
||||
pJobClass IN VARCHAR2 default NULL,
|
||||
pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
|
||||
);
|
||||
|
||||
|
||||
@@ -29,6 +29,7 @@ PROMPT MARS-835-PREHOOK: Rollback to Previous Versions
|
||||
PROMPT =========================================================================
|
||||
PROMPT WARNING: This will reverse all changes from MARS-835-PREHOOK installation!
|
||||
PROMPT - Removes A_PARALLEL_EXPORT_CHUNKS table
|
||||
PROMPT - Reverts A_SOURCE_FILE_RECEIVED table (rename ARCH_PATH to ARCH_FILE_NAME, drop PROCESS_NAME column)
|
||||
PROMPT - Restores ENV_MANAGER v3.1.0 (removes parallel error codes)
|
||||
PROMPT - Restores DATA_EXPORTER v2.1.0 (removes parallel + Smart Column Mapping)
|
||||
PROMPT =========================================================================
|
||||
@@ -65,13 +66,19 @@ PROMPT =========================================================================
|
||||
|
||||
PROMPT
|
||||
PROMPT =========================================================================
|
||||
PROMPT Step 3: Track Rollback Version
|
||||
PROMPT Step 3: Rollback A_SOURCE_FILE_RECEIVED Table Structure
|
||||
PROMPT =========================================================================
|
||||
@@93_MARS_835_PREHOOK_rollback_SOURCE_FILE_RECEIVED_table.sql
|
||||
|
||||
PROMPT
|
||||
PROMPT =========================================================================
|
||||
PROMPT Step 4: Track Rollback Version
|
||||
PROMPT =========================================================================
|
||||
@@track_package_versions.sql
|
||||
|
||||
PROMPT
|
||||
PROMPT =========================================================================
|
||||
PROMPT Step 4: Verify Package Versions After Rollback
|
||||
PROMPT Step 5: Verify Package Versions After Rollback
|
||||
PROMPT =========================================================================
|
||||
@@verify_packages_version.sql
|
||||
|
||||
|
||||
@@ -0,0 +1,29 @@
|
||||
-- ====================================================================
|
||||
-- A_SOURCE_FILE_RECEIVED Table
|
||||
-- ====================================================================
|
||||
-- Purpose: Track received files and their processing status
|
||||
-- ====================================================================
|
||||
|
||||
CREATE TABLE CT_MRDS.A_SOURCE_FILE_RECEIVED (
|
||||
A_SOURCE_FILE_RECEIVED_KEY NUMBER(38,0) NOT NULL ENABLE,
|
||||
A_SOURCE_FILE_CONFIG_KEY NUMBER(38,0) NOT NULL ENABLE,
|
||||
SOURCE_FILE_NAME VARCHAR2(1000) NOT NULL,
|
||||
CHECKSUM VARCHAR2(128),
|
||||
CREATED TIMESTAMP(6) WITH TIME ZONE,
|
||||
BYTES NUMBER,
|
||||
RECEPTION_DATE DATE NOT NULL,
|
||||
PROCESSING_STATUS VARCHAR2(200),
|
||||
EXTERNAL_TABLE_NAME VARCHAR2(200),
|
||||
PARTITION_YEAR VARCHAR2(4),
|
||||
PARTITION_MONTH VARCHAR2(2),
|
||||
ARCH_FILE_NAME VARCHAR2(1000),
|
||||
CONSTRAINT A_SOURCE_FILE_RECEIVED_PK PRIMARY KEY (A_SOURCE_FILE_RECEIVED_KEY),
|
||||
CONSTRAINT ASFR_A_SOURCE_FILE_CONFIG_KEY_FK FOREIGN KEY(A_SOURCE_FILE_CONFIG_KEY) REFERENCES CT_MRDS.A_SOURCE_FILE_CONFIG(A_SOURCE_FILE_CONFIG_KEY),
|
||||
CONSTRAINT A_SOURCE_FILE_RECEIVED_CHK CHECK (PROCESSING_STATUS IN ('RECEIVED', 'VALIDATED', 'READY_FOR_INGESTION', 'INGESTED', 'ARCHIVED'))
|
||||
) TABLESPACE "DATA";
|
||||
|
||||
-- Unique index for file identification (workaround for TIMESTAMP WITH TIMEZONE constraint limitation)
|
||||
CREATE UNIQUE INDEX CT_MRDS.A_SOURCE_FILE_RECEIVED_UK1
|
||||
ON CT_MRDS.A_SOURCE_FILE_RECEIVED(CHECKSUM, CREATED, BYTES);
|
||||
|
||||
GRANT SELECT, INSERT, UPDATE, DELETE ON CT_MRDS.A_SOURCE_FILE_RECEIVED TO MRDS_LOADER_ROLE;
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,233 @@
|
||||
create or replace PACKAGE CT_MRDS.DATA_EXPORTER
AUTHID CURRENT_USER
AS
/**
 * Data Export Package: Provides comprehensive data export capabilities to various formats (CSV, Parquet)
 * with support for cloud storage integration via Oracle Cloud Infrastructure (OCI).
 * The structure of comment is used by GET_PACKAGE_DOCUMENTATION function
 * which returns documentation text for confluence page (to Copy-Paste it).
 **/

    -- Package Version Information
    PACKAGE_VERSION    CONSTANT VARCHAR2(10)  := '2.8.0';
    PACKAGE_BUILD_DATE CONSTANT VARCHAR2(20)  := '2026-02-12 09:00:00';
    PACKAGE_AUTHOR     CONSTANT VARCHAR2(100) := 'Grzegorz Michalski';

    -- Version History (last 3-5 changes)
    VERSION_HISTORY CONSTANT VARCHAR2(4000) :=
        'v2.8.0 (2026-02-12): Added pJobClass parameter (default NULL) to EXPORT_TABLE_DATA_BY_DATE and EXPORT_TABLE_DATA_TO_CSV_BY_DATE so callers can select the scheduler job class (e.g. ''high'') for parallel export tasks.' || CHR(10) ||
        'v2.7.5 (2026-02-11): Added pRegisterExport parameter to EXPORT_TABLE_DATA procedure. When TRUE, registers each exported CSV file in A_SOURCE_FILE_RECEIVED.' || CHR(10) ||
        'v2.7.4 (2026-02-11): ACTUAL FILENAME STORAGE - Store real filename with Oracle suffix in SOURCE_FILE_NAME instead of theoretical filename.' || CHR(10) ||
        'v2.7.3 (2026-02-11): FIX LIKE pattern for DBMS_CLOUD.LIST_OBJECTS - Removed .csv extension from filename before pattern matching.' || CHR(10) ||
        'v2.7.2 (2026-02-11): FIX pRegisterExport in EXPORT_TABLE_DATA_TO_CSV_BY_DATE - Added missing pRegisterExport parameter to EXPORT_SINGLE_PARTITION call.' || CHR(10) ||
        'v2.7.1 (2026-02-11): AUTO-LOOKUP A_SOURCE_FILE_CONFIG_KEY - Parse pFolderName to automatically find config key from A_SOURCE_FILE_CONFIG.' || CHR(10) ||
        'v2.7.0 (2026-02-10): Added pRegisterExport parameter to EXPORT_TABLE_DATA_TO_CSV_BY_DATE. When TRUE, registers each exported CSV file in A_SOURCE_FILE_RECEIVED.' || CHR(10) ||
        'v2.6.3 (2026-01-28): COMPILATION FIX - Resolved ORA-00904 error in EXPORT_PARTITION_PARALLEL. SQLERRM properly assigned to vgMsgTmp variable.' || CHR(10) ||
        'v2.6.2 (2026-01-28): CRITICAL FIX - Race condition when multiple exports run simultaneously. Session-safe cleanup with TASK_NAME filtering.' || CHR(10) ||
        'v2.6.0 (2026-01-28): CRITICAL FIX - Added STATUS tracking to A_PARALLEL_EXPORT_CHUNKS table to prevent data duplication on retry.' || CHR(10);

    -- Line break used when composing multi-line messages
    cgBL CONSTANT VARCHAR2(2) := CHR(13)||CHR(10);
    -- Scratch buffer for error/diagnostic messages
    vgMsgTmp VARCHAR2(32000);

    ---------------------------------------------------------------------------------------------------------------------------
    -- TYPE DEFINITIONS FOR PARTITION HANDLING
    ---------------------------------------------------------------------------------------------------------------------------

    /**
     * Record type for year/month partition information
     **/
    TYPE partition_rec IS RECORD (
        year  VARCHAR2(4),
        month VARCHAR2(2)
    );

    /**
     * Table type for collection of partition records
     **/
    TYPE partition_tab IS TABLE OF partition_rec;

    ---------------------------------------------------------------------------------------------------------------------------
    -- INTERNAL PARALLEL PROCESSING CALLBACK
    ---------------------------------------------------------------------------------------------------------------------------

    /**
     * @name EXPORT_PARTITION_PARALLEL
     * @desc Internal callback procedure for DBMS_PARALLEL_EXECUTE.
     *       Processes single partition (year/month) chunk in parallel task.
     *       Called by DBMS_PARALLEL_EXECUTE framework for each chunk.
     *       This procedure is PUBLIC because DBMS_PARALLEL_EXECUTE requires it,
     *       but should NOT be called directly by external code.
     * @param pStartId - Chunk start ID (CHUNK_ID from A_PARALLEL_EXPORT_CHUNKS table)
     * @param pEndId   - Chunk end ID (same as pStartId for single-row chunks)
     **/
    PROCEDURE EXPORT_PARTITION_PARALLEL (
        pStartId IN NUMBER,
        pEndId   IN NUMBER
    );

    ---------------------------------------------------------------------------------------------------------------------------
    -- MAIN EXPORT PROCEDURES
    ---------------------------------------------------------------------------------------------------------------------------

    /**
     * @name EXPORT_TABLE_DATA
     * @desc Wrapper procedure for DBMS_CLOUD.EXPORT_DATA.
     *       Exports data into CSV file on OCI infrastructure.
     *       pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
     *       Supports template table for column order and per-column date formatting.
     *       When pRegisterExport=TRUE, successfully exported files are registered in:
     *       - CT_MRDS.A_SOURCE_FILE_RECEIVED (tracks file location, size, checksum, and metadata)
     * @param pTemplateTableName - Optional template table (SCHEMA.TABLE or TABLE) for:
     *                             - Column order control (template defines CSV structure)
     *                             - Per-column date formatting via FILE_MANAGER.GET_DATE_FORMAT
     *                             - NULL = use source table columns in natural order
     * @param pRegisterExport    - When TRUE, registers each exported CSV file in A_SOURCE_FILE_RECEIVED table
     * @example
     *   begin
     *     DATA_EXPORTER.EXPORT_TABLE_DATA(
     *       pSchemaName        => 'CT_MRDS',
     *       pTableName         => 'MY_TABLE',
     *       pKeyColumnName     => 'A_ETL_LOAD_SET_KEY_FK',
     *       pBucketArea        => 'DATA',
     *       pFolderName        => 'csv_exports',
     *       pTemplateTableName => 'CT_ET_TEMPLATES.MY_TEMPLATE', -- Optional
     *       pRegisterExport    => TRUE                           -- Optional, default FALSE
     *     );
     *   end;
     **/
    PROCEDURE EXPORT_TABLE_DATA (
        pSchemaName        IN VARCHAR2,
        pTableName         IN VARCHAR2,
        pKeyColumnName     IN VARCHAR2,
        pBucketArea        IN VARCHAR2,
        pFolderName        IN VARCHAR2,
        pTemplateTableName IN VARCHAR2 default NULL,
        pRegisterExport    IN BOOLEAN  default FALSE,
        pCredentialName    IN VARCHAR2 default ENV_MANAGER.gvCredentialName
    );

    /**
     * @name EXPORT_TABLE_DATA_BY_DATE
     * @desc Wrapper procedure for DBMS_CLOUD.EXPORT_DATA.
     *       Exports data into PARQUET files on OCI infrastructure.
     *       Each YEAR_MONTH pair goes to separate file (implicit partitioning).
     *       Allows specifying custom column list or uses T.* if pColumnList is NULL.
     *       Validates that all columns in pColumnList exist in the target table.
     *       Automatically adds 'T.' prefix to column names in pColumnList.
     *       Supports parallel partition processing via pParallelDegree parameter (default 1, range 1-16).
     *       pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
     * @param pJobClass - Optional scheduler job class for the parallel export tasks
     *                    (e.g. 'high'); NULL = database default job class.
     *                    NOTE(review): added in v2.8.0 because deployment scripts now pass
     *                    pJobClass => 'high'; package BODY must forward it to the parallel
     *                    task framework - confirm body support.
     * @example
     *   begin
     *     DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
     *       pSchemaName     => 'CT_MRDS',
     *       pTableName      => 'MY_TABLE',
     *       pKeyColumnName  => 'A_ETL_LOAD_SET_KEY_FK',
     *       pBucketArea     => 'DATA',
     *       pFolderName     => 'parquet_exports',
     *       pColumnList     => 'COLUMN1, COLUMN2, COLUMN3', -- Optional
     *       pMinDate        => DATE '2024-01-01',
     *       pMaxDate        => SYSDATE,
     *       pParallelDegree => 8                            -- Optional, default 1, range 1-16
     *     );
     *   end;
     **/
    PROCEDURE EXPORT_TABLE_DATA_BY_DATE (
        pSchemaName        IN VARCHAR2,
        pTableName         IN VARCHAR2,
        pKeyColumnName     IN VARCHAR2,
        pBucketArea        IN VARCHAR2,
        pFolderName        IN VARCHAR2,
        pColumnList        IN VARCHAR2 default NULL,
        pMinDate           IN DATE     default DATE '1900-01-01',
        pMaxDate           IN DATE     default SYSDATE,
        pParallelDegree    IN NUMBER   default 1,
        pTemplateTableName IN VARCHAR2 default NULL,
        pCredentialName    IN VARCHAR2 default ENV_MANAGER.gvCredentialName,
        pJobClass          IN VARCHAR2 default NULL
    );

    /**
     * @name EXPORT_TABLE_DATA_TO_CSV_BY_DATE
     * @desc Exports data to separate CSV files partitioned by year and month.
     *       Creates one CSV file for each year/month combination found in the data.
     *       Uses the same date filtering mechanism with CT_ODS.A_LOAD_HISTORY as EXPORT_TABLE_DATA_BY_DATE,
     *       but exports to CSV format instead of Parquet.
     *       Supports parallel partition processing via pParallelDegree parameter (1-16).
     *       File naming pattern: {pFileName}_YYYYMM.csv or {TABLENAME}_YYYYMM.csv (if pFileName is NULL)
     *       pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
     *       When pRegisterExport=TRUE, successfully exported files are registered in:
     *       - CT_MRDS.A_SOURCE_FILE_RECEIVED (tracks file location, size, checksum, and metadata)
     * @param pJobClass - Optional scheduler job class for the parallel export tasks
     *                    (e.g. 'high'); NULL = database default job class.
     *                    NOTE(review): added in v2.8.0; package BODY must forward it to the
     *                    parallel task framework - confirm body support.
     * @example
     *   begin
     *     -- With custom filename
     *     DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
     *       pSchemaName     => 'CT_MRDS',
     *       pTableName      => 'MY_TABLE',
     *       pKeyColumnName  => 'A_ETL_LOAD_SET_KEY_FK',
     *       pBucketArea     => 'DATA',
     *       pFolderName     => 'exports',
     *       pFileName       => 'my_export.csv',
     *       pMinDate        => DATE '2024-01-01',
     *       pMaxDate        => SYSDATE,
     *       pParallelDegree => 8,    -- Optional, default 1, range 1-16
     *       pRegisterExport => TRUE  -- Optional, default FALSE, registers to A_SOURCE_FILE_RECEIVED
     *     );
     *
     *     -- With auto-generated filename (based on table name only)
     *     DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
     *       pSchemaName     => 'OU_TOP',
     *       pTableName      => 'AGGREGATED_ALLOTMENT',
     *       pKeyColumnName  => 'A_ETL_LOAD_SET_KEY_FK',
     *       pBucketArea     => 'ARCHIVE',
     *       pFolderName     => 'exports',
     *       pMinDate        => DATE '2025-09-01',
     *       pMaxDate        => DATE '2025-09-17',
     *       pRegisterExport => TRUE  -- Registers each export to A_SOURCE_FILE_RECEIVED table
     *     );
     *     -- This will create files like: AGGREGATED_ALLOTMENT_202509.csv, etc.
     *   end;
     **/
    PROCEDURE EXPORT_TABLE_DATA_TO_CSV_BY_DATE (
        pSchemaName        IN VARCHAR2,
        pTableName         IN VARCHAR2,
        pKeyColumnName     IN VARCHAR2,
        pBucketArea        IN VARCHAR2,
        pFolderName        IN VARCHAR2,
        pFileName          IN VARCHAR2 DEFAULT NULL,
        pColumnList        IN VARCHAR2 default NULL,
        pMinDate           IN DATE     default DATE '1900-01-01',
        pMaxDate           IN DATE     default SYSDATE,
        pParallelDegree    IN NUMBER   default 1,
        pTemplateTableName IN VARCHAR2 default NULL,
        pMaxFileSize       IN NUMBER   default 104857600,
        pRegisterExport    IN BOOLEAN  default FALSE,
        pCredentialName    IN VARCHAR2 default ENV_MANAGER.gvCredentialName,
        pJobClass          IN VARCHAR2 default NULL
    );

    ---------------------------------------------------------------------------------------------------------------------------
    -- VERSION MANAGEMENT FUNCTIONS
    ---------------------------------------------------------------------------------------------------------------------------

    /**
     * Returns the current package version number
     * return: Version string in format X.Y.Z (e.g., '2.1.0')
     **/
    FUNCTION GET_VERSION RETURN VARCHAR2;

    /**
     * Returns comprehensive build information including version, date, and author
     * return: Formatted string with complete build details
     **/
    FUNCTION GET_BUILD_INFO RETURN VARCHAR2;

    /**
     * Returns the version history with recent changes
     * return: Multi-line string with version history
     **/
    FUNCTION GET_VERSION_HISTORY RETURN VARCHAR2;

END;

/
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,625 @@
|
||||
create or replace PACKAGE CT_MRDS.ENV_MANAGER
|
||||
AUTHID CURRENT_USER
|
||||
AS
|
||||
/**
|
||||
* General comment for package: Please put comments for functions and procedures as shown in below example.
|
||||
* It is a standard.
|
||||
* The structure of comment is used by GET_PACKAGE_DOCUMENTATION function
|
||||
* which returns documentation text for confluence page (to Copy-Paste it).
|
||||
**/
|
||||
|
||||
-- Example comment:
|
||||
/**
|
||||
* @name EX_PROCEDURE_NAME
|
||||
* @desc Procedure description
|
||||
* @example select ENV_MANAGER.EX_PROCEDURE_NAME(pParameter => 129) from dual;
|
||||
* @ex_rslt Example Result
|
||||
**/
|
||||
|
||||
-- Package Version Information (Semantic Versioning: MAJOR.MINOR.PATCH)
|
||||
PACKAGE_VERSION CONSTANT VARCHAR2(10) := '3.2.0';
|
||||
PACKAGE_BUILD_DATE CONSTANT VARCHAR2(20) := '2025-12-20 10:00:00';
|
||||
PACKAGE_AUTHOR CONSTANT VARCHAR2(100) := 'Grzegorz Michalski';
|
||||
|
||||
-- Version History (Latest changes first)
|
||||
VERSION_HISTORY CONSTANT VARCHAR2(4000) :=
|
||||
'3.2.0 (2025-12-20): Added error codes for parallel execution support (CODE_INVALID_PARALLEL_DEGREE -20110, CODE_PARALLEL_EXECUTION_FAILED -20111)' || CHR(13)||CHR(10) ||
|
||||
'3.1.0 (2025-10-22): Added package hash tracking and automatic change detection system (SHA256 hashing)' || CHR(13)||CHR(10) ||
|
||||
'3.0.0 (2025-10-22): Added package versioning system with centralized version management functions' || CHR(13)||CHR(10) ||
|
||||
'2.1.0 (2025-10-15): Added ANALYZE_VALIDATION_ERRORS function for comprehensive CSV validation analysis' || CHR(13)||CHR(10) ||
|
||||
'2.0.0 (2025-10-01): Added LOG_PROCESS_ERROR procedure with enhanced error diagnostics and stack traces' || CHR(13)||CHR(10) ||
|
||||
'1.5.0 (2025-09-20): Added console logging support with gvConsoleLoggingEnabled configuration' || CHR(13)||CHR(10) ||
|
||||
'1.0.0 (2025-09-01): Initial release with error management and configuration system';
|
||||
|
||||
TYPE Error_Record IS RECORD (
|
||||
code PLS_INTEGER,
|
||||
message VARCHAR2(4000)
|
||||
);
|
||||
|
||||
TYPE tErrorList IS TABLE OF Error_Record INDEX BY PLS_INTEGER;
|
||||
|
||||
Errors tErrorList;
|
||||
|
||||
|
||||
guid VARCHAR2(32);
|
||||
gvEnv VARCHAR2(200);
|
||||
gvUsername VARCHAR2(128);
|
||||
gvOsuser VARCHAR2(128);
|
||||
gvMachine VARCHAR2(64);
|
||||
gvModule VARCHAR2(64);
|
||||
|
||||
gvNameSpace VARCHAR2(200);
|
||||
gvRegion VARCHAR2(200);
|
||||
gvDataBucketName VARCHAR2(200);
|
||||
gvInboxBucketName VARCHAR2(200);
|
||||
gvArchiveBucketName VARCHAR2(200);
|
||||
gvDataBucketUri VARCHAR2(200);
|
||||
gvInboxBucketUri VARCHAR2(200);
|
||||
gvArchiveBucketUri VARCHAR2(200);
|
||||
gvCredentialName VARCHAR2(200);
|
||||
|
||||
-- Overwritten by variable "LoggingEnabled" in A_FILE_MANAGER_CONFIG.CONFIG_VARIABLE table
|
||||
gvLoggingEnabled VARCHAR2(3) := 'ON'; -- 'ON' or 'OFF'
|
||||
|
||||
-- Overwritten by variable "MinLogLevel" in A_FILE_MANAGER_CONFIG.CONFIG_VARIABLE table
|
||||
-- Possible values: DEBUG ,INFO ,WARNING ,ERROR
|
||||
gvMinLogLevel VARCHAR2(10) := 'DEBUG';
|
||||
|
||||
-- Overwritten by variable "DefaultDateFormat" in A_FILE_MANAGER_CONFIG.CONFIG_VARIABLE table
|
||||
gvDefaultDateFormat VARCHAR2(200) := 'DD/MM/YYYY HH24:MI:SS';
|
||||
|
||||
-- Overwritten by variable "ConsoleLoggingEnabled" in A_FILE_MANAGER_CONFIG.CONFIG_VARIABLE table
|
||||
gvConsoleLoggingEnabled VARCHAR2(3) := 'ON'; -- 'ON' or 'OFF'
|
||||
|
||||
cgBL CONSTANT VARCHAR2(2) := CHR(13)||CHR(10);
|
||||
|
||||
vgSourceFileConfigKey PLS_INTEGER;
|
||||
|
||||
vgMsgTmp VARCHAR2(32000);
|
||||
--Exceptions
|
||||
ERR_EMPTY_FILEURI_AND_RECKEY EXCEPTION;
|
||||
CODE_EMPTY_FILEURI_AND_RECKEY CONSTANT PLS_INTEGER := -20001;
|
||||
MSG_EMPTY_FILEURI_AND_RECKEY VARCHAR2(4000) := 'Either pFileUri or pSourceFileReceivedKey must be not null';
|
||||
PRAGMA EXCEPTION_INIT( ERR_EMPTY_FILEURI_AND_RECKEY
|
||||
,CODE_EMPTY_FILEURI_AND_RECKEY);
|
||||
|
||||
|
||||
ERR_NO_CONFIG_MATCH_FOR_FILEURI EXCEPTION;
|
||||
CODE_NO_CONFIG_MATCH_FOR_FILEURI CONSTANT PLS_INTEGER := -20002;
|
||||
MSG_NO_CONFIG_MATCH_FOR_FILEURI VARCHAR2(4000) := 'No match for source file in A_SOURCE_FILE_CONFIG table'
|
||||
||cgBL||' The file provided in parameter: pFileUri does not have '
|
||||
||cgBL||' coresponding configuration in A_SOURCE_FILE_CONFIG table';
|
||||
PRAGMA EXCEPTION_INIT( ERR_NO_CONFIG_MATCH_FOR_FILEURI
|
||||
,CODE_NO_CONFIG_MATCH_FOR_FILEURI);
|
||||
|
||||
ERR_MULTIPLE_MATCH_FOR_SRCFILE EXCEPTION;
|
||||
CODE_MULTIPLE_MATCH_FOR_SRCFILE CONSTANT PLS_INTEGER := -20003;
|
||||
MSG_MULTIPLE_MATCH_FOR_SRCFILE VARCHAR2(4000) := 'Multiple match for source file in A_SOURCE_FILE_CONFIG table';
|
||||
PRAGMA EXCEPTION_INIT( ERR_MULTIPLE_MATCH_FOR_SRCFILE
|
||||
,CODE_MULTIPLE_MATCH_FOR_SRCFILE);
|
||||
|
||||
ERR_MISSING_COLUMN_DATE_FORMAT EXCEPTION;
|
||||
CODE_MISSING_COLUMN_DATE_FORMAT CONSTANT PLS_INTEGER := -20004;
|
||||
MSG_MISSING_COLUMN_DATE_FORMAT VARCHAR2(4000) := 'Missing entry in config table: A_COLUMN_DATE_FORMAT primary key(TEMPLATE_TABLE_NAME, COLUMN_NAME)'
|
||||
||cgBL||' Remember: each column which data_type IN (''DATE'', ''TIMESTAMP'')'
|
||||
||cgBL||' should have DateFormat specified in A_COLUMN_DATE_FORMAT table '
|
||||
||cgBL||' for example: ''YYYY-MM-DD''';
|
||||
PRAGMA EXCEPTION_INIT( ERR_MISSING_COLUMN_DATE_FORMAT
|
||||
,CODE_MISSING_COLUMN_DATE_FORMAT);
|
||||
|
||||
ERR_MULTIPLE_COLUMN_DATE_FORMAT EXCEPTION;
|
||||
CODE_MULTIPLE_COLUMN_DATE_FORMAT CONSTANT PLS_INTEGER := -20005;
|
||||
MSG_MULTIPLE_COLUMN_DATE_FORMAT VARCHAR2(4000) := 'Multiple records for date format in A_COLUMN_DATE_FORMAT table'
|
||||
||cgBL||' There should be only one format specified for each DAT/TIMESTAMP column';
|
||||
PRAGMA EXCEPTION_INIT( ERR_MULTIPLE_COLUMN_DATE_FORMAT
|
||||
,CODE_MULTIPLE_COLUMN_DATE_FORMAT);
|
||||
|
||||
|
||||
ERR_DIDNT_GET_LOAD_OPERATION_ID EXCEPTION;
|
||||
CODE_DIDNT_GET_LOAD_OPERATION_ID CONSTANT PLS_INTEGER := -20006;
|
||||
MSG_DIDNT_GET_LOAD_OPERATION_ID VARCHAR2(4000) := 'Didnt get load operation id from external table validation';
|
||||
PRAGMA EXCEPTION_INIT( ERR_DIDNT_GET_LOAD_OPERATION_ID
|
||||
,CODE_DIDNT_GET_LOAD_OPERATION_ID);
|
||||
|
||||
ERR_NO_CONFIG_FOR_RECEIVED_FILE EXCEPTION;
|
||||
CODE_NO_CONFIG_FOR_RECEIVED_FILE CONSTANT PLS_INTEGER := -20007;
|
||||
MSG_NO_CONFIG_FOR_RECEIVED_FILE VARCHAR2(4000) := 'No match for received source file in A_SOURCE_FILE_CONFIG '
|
||||
||cgBL||' or missing data in A_SOURCE_FILE_RECEIVED table for provided pSourceFileReceivedKey parameter';
|
||||
PRAGMA EXCEPTION_INIT( ERR_NO_CONFIG_FOR_RECEIVED_FILE
|
||||
,CODE_NO_CONFIG_FOR_RECEIVED_FILE);
|
||||
|
||||
ERR_MULTI_CONFIG_FOR_RECEIVED_FILE EXCEPTION;
|
||||
CODE_MULTI_CONFIG_FOR_RECEIVED_FILE CONSTANT PLS_INTEGER := -20008;
|
||||
MSG_MULTI_CONFIG_FOR_RECEIVED_FILE VARCHAR2(4000) := 'Multiple matchs for received source file in A_SOURCE_FILE_CONFIG';
|
||||
PRAGMA EXCEPTION_INIT( ERR_MULTI_CONFIG_FOR_RECEIVED_FILE
|
||||
,CODE_MULTI_CONFIG_FOR_RECEIVED_FILE);
|
||||
|
||||
ERR_FILE_NOT_FOUND_ON_CLOUD EXCEPTION;
|
||||
CODE_FILE_NOT_FOUND_ON_CLOUD CONSTANT PLS_INTEGER := -20009;
|
||||
MSG_FILE_NOT_FOUND_ON_CLOUD VARCHAR2(4000) := 'File not found on the cloud';
|
||||
PRAGMA EXCEPTION_INIT( ERR_FILE_NOT_FOUND_ON_CLOUD
|
||||
,CODE_FILE_NOT_FOUND_ON_CLOUD);
|
||||
|
||||
ERR_FILE_VALIDATION_FAILED EXCEPTION;
|
||||
CODE_FILE_VALIDATION_FAILED CONSTANT PLS_INTEGER := -20010;
|
||||
MSG_FILE_VALIDATION_FAILED VARCHAR2(4000) := 'File validation failed';
|
||||
PRAGMA EXCEPTION_INIT( ERR_FILE_VALIDATION_FAILED
|
||||
,CODE_FILE_VALIDATION_FAILED);
|
||||
|
||||
ERR_EXCESS_COLUMNS_DETECTED EXCEPTION;
|
||||
CODE_EXCESS_COLUMNS_DETECTED CONSTANT PLS_INTEGER := -20011;
|
||||
MSG_EXCESS_COLUMNS_DETECTED VARCHAR2(4000) := 'CSV file contains more columns than template allows';
|
||||
PRAGMA EXCEPTION_INIT( ERR_EXCESS_COLUMNS_DETECTED
|
||||
,CODE_EXCESS_COLUMNS_DETECTED);
|
||||
|
||||
ERR_NO_CONFIG_MATCH EXCEPTION;
|
||||
CODE_NO_CONFIG_MATCH CONSTANT PLS_INTEGER := -20012;
|
||||
MSG_NO_CONFIG_MATCH VARCHAR2(4000) := 'No match for specified parameters in A_SOURCE_FILE_CONFIG table';
|
||||
PRAGMA EXCEPTION_INIT( ERR_NO_CONFIG_MATCH
|
||||
,CODE_NO_CONFIG_MATCH);
|
||||
|
||||
ERR_UNKNOWN_PREFIX EXCEPTION;
|
||||
CODE_UNKNOWN_PREFIX CONSTANT PLS_INTEGER := -20013;
|
||||
MSG_UNKNOWN_PREFIX VARCHAR2(4000) := 'Unknown prefix';
|
||||
PRAGMA EXCEPTION_INIT( ERR_UNKNOWN_PREFIX
|
||||
,CODE_UNKNOWN_PREFIX);
|
||||
|
||||
ERR_TABLE_NOT_EXISTS EXCEPTION;
|
||||
CODE_TABLE_NOT_EXISTS CONSTANT PLS_INTEGER := -20014;
|
||||
MSG_TABLE_NOT_EXISTS VARCHAR2(4000) := 'Table does not exist';
|
||||
PRAGMA EXCEPTION_INIT( ERR_TABLE_NOT_EXISTS
|
||||
,CODE_TABLE_NOT_EXISTS);
|
||||
|
||||
ERR_COLUMN_NOT_EXISTS EXCEPTION;
|
||||
CODE_COLUMN_NOT_EXISTS CONSTANT PLS_INTEGER := -20015;
|
||||
MSG_COLUMN_NOT_EXISTS VARCHAR2(4000) := 'Column does not exist in table';
|
||||
PRAGMA EXCEPTION_INIT( ERR_COLUMN_NOT_EXISTS
|
||||
,CODE_COLUMN_NOT_EXISTS);
|
||||
|
||||
ERR_UNSUPPORTED_DATA_TYPE EXCEPTION;
|
||||
CODE_UNSUPPORTED_DATA_TYPE CONSTANT PLS_INTEGER := -20016;
|
||||
MSG_UNSUPPORTED_DATA_TYPE VARCHAR2(4000) := 'Unsupported data type';
|
||||
PRAGMA EXCEPTION_INIT( ERR_UNSUPPORTED_DATA_TYPE
|
||||
,CODE_UNSUPPORTED_DATA_TYPE);
|
||||
|
||||
ERR_MISSING_SOURCE_KEY EXCEPTION;
|
||||
CODE_MISSING_SOURCE_KEY CONSTANT PLS_INTEGER := -20017;
|
||||
MSG_MISSING_SOURCE_KEY VARCHAR2(4000) := 'The Source was not found in parent table A_SOURCE';
|
||||
PRAGMA EXCEPTION_INIT( ERR_MISSING_SOURCE_KEY
|
||||
,CODE_MISSING_SOURCE_KEY);
|
||||
|
||||
ERR_NULL_SOURCE_FILE_CONFIG_KEY EXCEPTION;
|
||||
CODE_NULL_SOURCE_FILE_CONFIG_KEY CONSTANT PLS_INTEGER := -20018;
|
||||
MSG_NULL_SOURCE_FILE_CONFIG_KEY VARCHAR2(4000) := 'No entry in A_SOURCE_FILE_CONFIG table for specified A_SOURCE_FILE_CONFIG_KEY';
|
||||
PRAGMA EXCEPTION_INIT( ERR_NULL_SOURCE_FILE_CONFIG_KEY
|
||||
,CODE_NULL_SOURCE_FILE_CONFIG_KEY);
|
||||
|
||||
ERR_DUPLICATED_SOURCE_KEY EXCEPTION;
|
||||
CODE_DUPLICATED_SOURCE_KEY CONSTANT PLS_INTEGER := -20019;
|
||||
MSG_DUPLICATED_SOURCE_KEY VARCHAR2(4000) := 'The Source already exists in the A_SOURCE table';
|
||||
PRAGMA EXCEPTION_INIT( ERR_DUPLICATED_SOURCE_KEY
|
||||
,CODE_DUPLICATED_SOURCE_KEY);
|
||||
|
||||
ERR_MISSING_CONTAINER_CONFIG EXCEPTION;
|
||||
CODE_MISSING_CONTAINER_CONFIG CONSTANT PLS_INTEGER := -20020;
|
||||
MSG_MISSING_CONTAINER_CONFIG VARCHAR2(4000) := 'No match in A_SOURCE_FILE_CONFIG table where SOURCE_FILE_TYPE=''CONTAINER'' and specified SOURCE_FILE_ID';
|
||||
PRAGMA EXCEPTION_INIT( ERR_MISSING_CONTAINER_CONFIG
|
||||
,CODE_MISSING_CONTAINER_CONFIG);
|
||||
|
||||
ERR_MULTIPLE_CONTAINER_ENTRIES EXCEPTION;
|
||||
CODE_MULTIPLE_CONTAINER_ENTRIES CONSTANT PLS_INTEGER := -20021;
|
||||
MSG_MULTIPLE_CONTAINER_ENTRIES VARCHAR2(4000) := 'Multiple matches in A_SOURCE_FILE_CONFIG table where SOURCE_FILE_TYPE=''CONTAINER'' and specified SOURCE_FILE_ID';
|
||||
PRAGMA EXCEPTION_INIT( ERR_MULTIPLE_CONTAINER_ENTRIES
|
||||
,CODE_MULTIPLE_CONTAINER_ENTRIES);
|
||||
|
||||
ERR_WRONG_DESTINATION_PARAM EXCEPTION;
|
||||
CODE_WRONG_DESTINATION_PARAM CONSTANT PLS_INTEGER := -20022;
|
||||
MSG_WRONG_DESTINATION_PARAM VARCHAR2(4000) := 'Wrong destination parameter provided.';
|
||||
PRAGMA EXCEPTION_INIT( ERR_WRONG_DESTINATION_PARAM
|
||||
,CODE_WRONG_DESTINATION_PARAM);
|
||||
|
||||
ERR_FILE_NOT_EXISTS_ON_CLOUD EXCEPTION;
|
||||
CODE_FILE_NOT_EXISTS_ON_CLOUD CONSTANT PLS_INTEGER := -20023;
|
||||
MSG_FILE_NOT_EXISTS_ON_CLOUD VARCHAR2(4000) := 'File not exists on cloud.';
|
||||
PRAGMA EXCEPTION_INIT( ERR_FILE_NOT_EXISTS_ON_CLOUD
|
||||
,CODE_FILE_NOT_EXISTS_ON_CLOUD);
|
||||
|
||||
ERR_FILE_ALREADY_REGISTERED EXCEPTION;
|
||||
CODE_FILE_ALREADY_REGISTERED CONSTANT PLS_INTEGER := -20024;
|
||||
MSG_FILE_ALREADY_REGISTERED VARCHAR2(4000) := 'File already registered in A_SOURCE_FILE_RECEIVED table.';
|
||||
PRAGMA EXCEPTION_INIT( ERR_FILE_ALREADY_REGISTERED
|
||||
,CODE_FILE_ALREADY_REGISTERED);
|
||||
|
||||
ERR_WRONG_DATE_TIMESTAMP_FORMAT EXCEPTION;
|
||||
CODE_WRONG_DATE_TIMESTAMP_FORMAT CONSTANT PLS_INTEGER := -20025;
|
||||
MSG_WRONG_DATE_TIMESTAMP_FORMAT VARCHAR2(4000) := 'Provided DATE or TIMESTAMP format has errors (possible duplicated codes, ex: ''DD'').';
|
||||
PRAGMA EXCEPTION_INIT( ERR_WRONG_DATE_TIMESTAMP_FORMAT
|
||||
,CODE_WRONG_DATE_TIMESTAMP_FORMAT);
|
||||
|
||||
ERR_ENVIRONMENT_NOT_SET EXCEPTION;
|
||||
CODE_ENVIRONMENT_NOT_SET CONSTANT PLS_INTEGER := -20026;
|
||||
MSG_ENVIRONMENT_NOT_SET VARCHAR2(4000) := 'EnvironmentID not set'
|
||||
||cgBL||' Information about environment is needed to get proper configuration values.'
|
||||
||cgBL||' It can be set up in two different ways:'
|
||||
||cgBL||' 1. Set it on session level: execute DBMS_SESSION.SET_IDENTIFIER (client_id => ''dev'')'
|
||||
||cgBL||' 2. Set it on configuration level: Insert into CT_MRDS.A_FILE_MANAGER_CONFIG (ENVIRONMENT_ID,CONFIG_VARIABLE,CONFIG_VARIABLE_VALUE) values (''default'',''environment_id'',''dev'')'
|
||||
||cgBL||' Session level setup (1.) takes precedence over configuration level one (2.)'
|
||||
;
|
||||
PRAGMA EXCEPTION_INIT( ERR_ENVIRONMENT_NOT_SET
|
||||
,CODE_ENVIRONMENT_NOT_SET);
|
||||
|
||||
|
||||
ERR_CONFIG_VARIABLE_NOT_SET EXCEPTION;
|
||||
CODE_CONFIG_VARIABLE_NOT_SET CONSTANT PLS_INTEGER := -20027;
|
||||
MSG_CONFIG_VARIABLE_NOT_SET VARCHAR2(4000) := 'Missing configuration value in A_FILE_MANAGER_CONFIG';
|
||||
PRAGMA EXCEPTION_INIT( ERR_CONFIG_VARIABLE_NOT_SET
|
||||
,CODE_CONFIG_VARIABLE_NOT_SET);
|
||||
|
||||
ERR_NOT_INPUT_SOURCE_FILE_TYPE EXCEPTION;
|
||||
CODE_NOT_INPUT_SOURCE_FILE_TYPE CONSTANT PLS_INTEGER := -20028;
|
||||
MSG_NOT_INPUT_SOURCE_FILE_TYPE VARCHAR2(4000) := 'Archival can be executed only for A_SOURCE_FILE_CONFIG_KEY where SOURCE_FILE_TYPE=''INPUT''';
|
||||
PRAGMA EXCEPTION_INIT( ERR_NOT_INPUT_SOURCE_FILE_TYPE
|
||||
,CODE_NOT_INPUT_SOURCE_FILE_TYPE);
|
||||
|
||||
ERR_EXP_DATA_FOR_ARCH_FAILED EXCEPTION;
|
||||
CODE_EXP_DATA_FOR_ARCH_FAILED CONSTANT PLS_INTEGER := -20029;
|
||||
MSG_EXP_DATA_FOR_ARCH_FAILED VARCHAR2(4000) := 'Export data for archival failed.';
|
||||
PRAGMA EXCEPTION_INIT( ERR_EXP_DATA_FOR_ARCH_FAILED
|
||||
,CODE_EXP_DATA_FOR_ARCH_FAILED);
|
||||
|
||||
ERR_RESTORE_FILE_FROM_TRASH EXCEPTION;
|
||||
CODE_RESTORE_FILE_FROM_TRASH CONSTANT PLS_INTEGER := -20030;
|
||||
MSG_RESTORE_FILE_FROM_TRASH VARCHAR2(4000) := 'Unexpected issues occured while archival process. Restoration of exported files failed.';
|
||||
PRAGMA EXCEPTION_INIT( ERR_RESTORE_FILE_FROM_TRASH
|
||||
,CODE_RESTORE_FILE_FROM_TRASH);
|
||||
|
||||
ERR_CHANGE_STAT_TO_ARCHIVED_FAILED EXCEPTION;
|
||||
CODE_CHANGE_STAT_TO_ARCHIVED_FAILED CONSTANT PLS_INTEGER := -20031;
|
||||
MSG_CHANGE_STAT_TO_ARCHIVED_FAILED VARCHAR2(4000) := 'Failed to change file status to: ARCHIVED in A_SOURCE_FILE_RECEIVED table.';
|
||||
PRAGMA EXCEPTION_INIT( ERR_CHANGE_STAT_TO_ARCHIVED_FAILED
|
||||
,CODE_CHANGE_STAT_TO_ARCHIVED_FAILED);
|
||||
|
||||
ERR_MOVE_FILE_TO_TRASH_FAILED EXCEPTION;
|
||||
CODE_MOVE_FILE_TO_TRASH_FAILED CONSTANT PLS_INTEGER := -20032;
|
||||
MSG_MOVE_FILE_TO_TRASH_FAILED VARCHAR2(4000) := 'FAILED to move file to TRASH before DROPPING it.';
|
||||
PRAGMA EXCEPTION_INIT( ERR_MOVE_FILE_TO_TRASH_FAILED
|
||||
,CODE_MOVE_FILE_TO_TRASH_FAILED);
|
||||
|
||||
ERR_DROP_EXPORTED_FILES_FAILED EXCEPTION;
|
||||
CODE_DROP_EXPORTED_FILES_FAILED CONSTANT PLS_INTEGER := -20033;
|
||||
MSG_DROP_EXPORTED_FILES_FAILED VARCHAR2(4000) := 'FAILED to move file to TRASH before DROPPING it.';
|
||||
PRAGMA EXCEPTION_INIT( ERR_DROP_EXPORTED_FILES_FAILED
|
||||
,CODE_DROP_EXPORTED_FILES_FAILED);
|
||||
|
||||
ERR_INVALID_BUCKET_AREA EXCEPTION;
|
||||
CODE_INVALID_BUCKET_AREA CONSTANT PLS_INTEGER := -20034;
|
||||
MSG_INVALID_BUCKET_AREA VARCHAR2(4000) := 'Invalid bucket area specified. Valid values: INBOX, ODS, DATA, ARCHIVE';
|
||||
PRAGMA EXCEPTION_INIT( ERR_INVALID_BUCKET_AREA
|
||||
,CODE_INVALID_BUCKET_AREA);
|
||||
|
||||
ERR_INVALID_PARALLEL_DEGREE EXCEPTION;
|
||||
CODE_INVALID_PARALLEL_DEGREE CONSTANT PLS_INTEGER := -20110;
|
||||
MSG_INVALID_PARALLEL_DEGREE VARCHAR2(4000) := 'Invalid parallel degree parameter. Must be between 1 and 16';
|
||||
PRAGMA EXCEPTION_INIT( ERR_INVALID_PARALLEL_DEGREE
|
||||
,CODE_INVALID_PARALLEL_DEGREE);
|
||||
|
||||
ERR_PARALLEL_EXECUTION_FAILED EXCEPTION;
|
||||
CODE_PARALLEL_EXECUTION_FAILED CONSTANT PLS_INTEGER := -20111;
|
||||
MSG_PARALLEL_EXECUTION_FAILED VARCHAR2(4000) := 'Parallel execution failed';
|
||||
PRAGMA EXCEPTION_INIT( ERR_PARALLEL_EXECUTION_FAILED
|
||||
,CODE_PARALLEL_EXECUTION_FAILED);
|
||||
|
||||
ERR_UNKNOWN EXCEPTION;
|
||||
CODE_UNKNOWN CONSTANT PLS_INTEGER := -20999;
|
||||
MSG_UNKNOWN VARCHAR2(4000) := 'Unknown Error Occured';
|
||||
PRAGMA EXCEPTION_INIT( ERR_UNKNOWN
|
||||
,CODE_UNKNOWN);
|
||||
|
||||
---------------------------------------------------------------------------------------------------------------------------
|
||||
---------------------------------------------------------------------------------------------------------------------------
|
||||
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* @name LOG_PROCESS_EVENT
|
||||
* @desc Insert a new log record into A_PROCESS_LOG table.
|
||||
* Also outputs to console if gvConsoleLoggingEnabled = 'ON'.
|
||||
* Respects logging level configuration (gvMinLogLevel).
|
||||
* @example ENV_MANAGER.LOG_PROCESS_EVENT('Process completed successfully', 'INFO', 'pParam1=value1');
|
||||
* @ex_rslt Record inserted into A_PROCESS_LOG table and optionally displayed in console output
|
||||
**/
|
||||
PROCEDURE LOG_PROCESS_EVENT (
|
||||
pLogMessage VARCHAR2
|
||||
,pLogLevel VARCHAR2 DEFAULT 'ERROR'
|
||||
,pParameters VARCHAR2 DEFAULT NULL
|
||||
,pProcessName VARCHAR2 DEFAULT 'FILE_MANAGER'
|
||||
);
|
||||
|
||||
/**
|
||||
* @name LOG_PROCESS_ERROR
|
||||
* @desc Insert a detailed error record into A_PROCESS_LOG table with full stack trace, backtrace, and call stack.
|
||||
* This procedure captures comprehensive error information for debugging purposes while
|
||||
* allowing clean user-facing error messages to be raised separately.
|
||||
* @param pLogMessage - Base error message description
|
||||
* @param pParameters - Procedure parameters for context
|
||||
* @param pProcessName - Name of the calling process/package
|
||||
* @ex_rslt Record inserted into A_PROCESS_LOG table with complete error stack information
|
||||
*/
|
||||
PROCEDURE LOG_PROCESS_ERROR (
|
||||
pLogMessage VARCHAR2
|
||||
,pParameters VARCHAR2 DEFAULT NULL
|
||||
,pProcessName VARCHAR2 DEFAULT 'FILE_MANAGER'
|
||||
);
|
||||
|
||||
/**
|
||||
* @name INIT_ERRORS
|
||||
* @desc Loads data into Errors array.
|
||||
* Errors array is a list of Record(Error_Code, Error_Message) index by Error_Code.
|
||||
* Called automatically during package initialization.
|
||||
* @example Called automatically when package is first referenced
|
||||
* @ex_rslt Errors array populated with all error codes and messages
|
||||
**/
|
||||
PROCEDURE INIT_ERRORS;
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* @name GET_DEFAULT_ENV
|
||||
* @desc It returns string with name of default environment.
|
||||
* Return string is A_FILE_MANAGER_CONFIG.ENVIRONMENT_ID value.
|
||||
* @example select ENV_MANAGER.GET_DEFAULT_ENV() from dual;
|
||||
* @ex_rslt dev
|
||||
**/
|
||||
FUNCTION GET_DEFAULT_ENV
|
||||
RETURN VARCHAR2;
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* @name INIT_VARIABLES
|
||||
* @desc For specified pEnv parameter (A_FILE_MANAGER_CONFIG.ENVIRONMENT_ID)
|
||||
* Assign values to following global package variables:
|
||||
* - gvNameSpace
|
||||
* - gvRegion
|
||||
* - gvCredentialName
|
||||
* - gvInboxBucketName
|
||||
* - gvDataBucketName
|
||||
* - gvArchiveBucketName
|
||||
* - gvInboxBucketUri
|
||||
* - gvDataBucketUri
|
||||
* - gvArchiveBucketUri
|
||||
* - gvLoggingEnabled
|
||||
* - gvMinLogLevel
|
||||
* - gvDefaultDateFormat
|
||||
* - gvConsoleLoggingEnabled
|
||||
**/
|
||||
PROCEDURE INIT_VARIABLES(
|
||||
pEnv VARCHAR2
|
||||
);
|
||||
|
||||
|
||||
|
||||
/**
 * @name GET_ERROR_MESSAGE
 * @desc Returns the error message for the specified pCode (Error_Code).
 *       The error message is taken from the Errors array loaded by the INIT_ERRORS procedure.
 * @example select ENV_MANAGER.GET_ERROR_MESSAGE(pCode => -20009) from dual;
 * @ex_rslt File not found on the cloud
 **/
FUNCTION GET_ERROR_MESSAGE(
    pCode PLS_INTEGER
) RETURN VARCHAR2;
|
||||
|
||||
|
||||
|
||||
/**
 * @name GET_ERROR_STACK
 * @desc Returns a string with all available error stack information.
 *       The error message is taken from the Errors array loaded by the INIT_ERRORS procedure.
 * @example
 *    select ENV_MANAGER.GET_ERROR_STACK(
 *               pFormat                => 'OUTPUT'
 *              ,pCode                  => -20009
 *              ,pSourceFileReceivedKey => NULL)
 *    from dual
 * @ex_rslt
 *    ------------------------------------------------------+
 *    Error Message:
 *    ORA-0000: normal, successful completion
 *    -------------------------------------------------------
 *    Error Stack:
 *    -------------------------------------------------------
 *    Error Backtrace:
 *    ------------------------------------------------------+
 **/
FUNCTION GET_ERROR_STACK(
    pFormat                 VARCHAR2
   ,pCode                   PLS_INTEGER
   ,pSourceFileReceivedKey  CT_MRDS.A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY%TYPE DEFAULT NULL
) RETURN VARCHAR2;
|
||||
|
||||
/**
 * @name FORMAT_PARAMETERS
 * @desc Formats a parameter list for logging purposes.
 *       Converts SYS.ODCIVARCHAR2LIST to a formatted string with proper NULL handling.
 * @example select ENV_MANAGER.FORMAT_PARAMETERS(SYS.ODCIVARCHAR2LIST('param1=value1', 'param2=NULL')) from dual;
 * @ex_rslt param1=value1 ,
 *          param2=NULL
 **/
FUNCTION FORMAT_PARAMETERS(
    pParameterList SYS.ODCIVARCHAR2LIST
) RETURN VARCHAR2;
|
||||
|
||||
/**
 * @name ANALYZE_VALIDATION_ERRORS
 * @desc Analyzes CSV validation errors and generates a detailed diagnostic report.
 *       Compares the CSV structure with the template table and provides specific error analysis.
 *       Includes suggested solutions for common validation issues.
 * @param pValidationLogTable - Name of validation log table (e.g., VALIDATE$242_LOG)
 * @param pTemplateSchema     - Schema of template table (e.g., CT_ET_TEMPLATES)
 * @param pTemplateTable      - Name of template table (e.g., MOCK_PROC_TABLE)
 * @param pCsvFileUri         - URI of CSV file being validated
 * @example SELECT ENV_MANAGER.ANALYZE_VALIDATION_ERRORS('VALIDATE$242_LOG', 'CT_ET_TEMPLATES', 'MOCK_PROC_TABLE', 'https://...') FROM DUAL;
 * @ex_rslt Detailed validation analysis report with column mismatches and solutions
 **/
FUNCTION ANALYZE_VALIDATION_ERRORS(
    pValidationLogTable VARCHAR2,
    pTemplateSchema     VARCHAR2,
    pTemplateTable      VARCHAR2,
    pCsvFileUri         VARCHAR2
) RETURN VARCHAR2;
|
||||
|
||||
---------------------------------------------------------------------------------------------------------------------------
-- PACKAGE VERSION MANAGEMENT FUNCTIONS
---------------------------------------------------------------------------------------------------------------------------

/**
 * @name GET_VERSION
 * @desc Returns the current version number of the ENV_MANAGER package.
 *       Uses semantic versioning format (MAJOR.MINOR.PATCH).
 * @example SELECT ENV_MANAGER.GET_VERSION() FROM DUAL;
 * @ex_rslt 3.0.0
 **/
FUNCTION GET_VERSION RETURN VARCHAR2;
|
||||
|
||||
/**
 * @name GET_BUILD_INFO
 * @desc Returns comprehensive build information including version, build date, and author.
 *       Formatted for display in logs or monitoring systems.
 * @example SELECT ENV_MANAGER.GET_BUILD_INFO() FROM DUAL;
 * @ex_rslt Package: ENV_MANAGER
 *          Version: 3.0.0
 *          Build Date: 2025-10-22 16:00:00
 *          Author: Grzegorz Michalski
 **/
FUNCTION GET_BUILD_INFO RETURN VARCHAR2;
|
||||
|
||||
/**
 * @name GET_VERSION_HISTORY
 * @desc Returns the complete version history with all releases and changes.
 *       Shows the evolution of package features over time.
 * @example SELECT ENV_MANAGER.GET_VERSION_HISTORY() FROM DUAL;
 * @ex_rslt ENV_MANAGER Version History:
 *          3.0.0 (2025-10-22): Added package versioning system...
 *          2.1.0 (2025-10-15): Added ANALYZE_VALIDATION_ERRORS function...
 **/
FUNCTION GET_VERSION_HISTORY RETURN VARCHAR2;
|
||||
|
||||
/**
 * @name GET_PACKAGE_VERSION_INFO
 * @desc Universal function to get formatted version information for any package.
 *       This centralized function is used by all packages in the system.
 * @param pPackageName - Name of the package
 * @param pVersion     - Version string (MAJOR.MINOR.PATCH format)
 * @param pBuildDate   - Build date timestamp
 * @param pAuthor      - Package author name
 * @example SELECT ENV_MANAGER.GET_PACKAGE_VERSION_INFO('FILE_MANAGER', '2.1.0', '2025-10-22 15:00:00', 'Grzegorz Michalski') FROM DUAL;
 * @ex_rslt Package: FILE_MANAGER
 *          Version: 2.1.0
 *          Build Date: 2025-10-22 15:00:00
 *          Author: Grzegorz Michalski
 **/
FUNCTION GET_PACKAGE_VERSION_INFO(
    pPackageName VARCHAR2,
    pVersion     VARCHAR2,
    pBuildDate   VARCHAR2,
    pAuthor      VARCHAR2
) RETURN VARCHAR2;
|
||||
|
||||
/**
 * @name FORMAT_VERSION_HISTORY
 * @desc Universal function to format version history for any package.
 *       Adds a package name header and proper formatting.
 * @param pPackageName    - Name of the package
 * @param pVersionHistory - Complete version history text
 * @example SELECT ENV_MANAGER.FORMAT_VERSION_HISTORY('FILE_MANAGER', '2.1.0 (2025-10-22): Export procedures...') FROM DUAL;
 * @ex_rslt FILE_MANAGER Version History:
 *          2.1.0 (2025-10-22): Export procedures...
 **/
FUNCTION FORMAT_VERSION_HISTORY(
    pPackageName    VARCHAR2,
    pVersionHistory VARCHAR2
) RETURN VARCHAR2;
|
||||
|
||||
---------------------------------------------------------------------------------------------------------------------------
-- PACKAGE HASH + CHANGE DETECTION FUNCTIONS
---------------------------------------------------------------------------------------------------------------------------

/**
 * @name CALCULATE_PACKAGE_HASH
 * @desc Calculates the SHA256 hash of package source code from ALL_SOURCE.
 *       Returns a hash for both SPEC and BODY (if it exists).
 *       Used for automatic change detection.
 * @param pPackageOwner - Schema owner of the package
 * @param pPackageName  - Name of the package
 * @param pPackageType  - Type of package code ('PACKAGE' for SPEC, 'PACKAGE BODY' for BODY)
 * @example SELECT ENV_MANAGER.CALCULATE_PACKAGE_HASH('CT_MRDS', 'FILE_MANAGER', 'PACKAGE') FROM DUAL;
 * @ex_rslt A7B3C5D9E8F1234567890ABCDEF... (64-character SHA256 hash)
 **/
FUNCTION CALCULATE_PACKAGE_HASH(
    pPackageOwner VARCHAR2,
    pPackageName  VARCHAR2,
    pPackageType  VARCHAR2  -- 'PACKAGE' or 'PACKAGE BODY'
) RETURN VARCHAR2;
|
||||
|
||||
/**
 * @name TRACK_PACKAGE_VERSION
 * @desc Records the package version and source code hash in the A_PACKAGE_VERSION_TRACKING table.
 *       Automatically detects if the source code changed without a version update.
 *       Should be called after every package deployment.
 * @param pPackageOwner     - Schema owner of the package
 * @param pPackageName      - Name of the package
 * @param pPackageVersion   - Current version from PACKAGE_VERSION constant
 * @param pPackageBuildDate - Build date from PACKAGE_BUILD_DATE constant
 * @param pPackageAuthor    - Author from PACKAGE_AUTHOR constant
 * @example EXEC ENV_MANAGER.TRACK_PACKAGE_VERSION('CT_MRDS', 'FILE_MANAGER', '3.2.0', '2025-10-22 16:30:00', 'Grzegorz Michalski');
 * @ex_rslt Record inserted into A_PACKAGE_VERSION_TRACKING with change detection status
 **/
PROCEDURE TRACK_PACKAGE_VERSION(
    pPackageOwner     VARCHAR2,
    pPackageName      VARCHAR2,
    pPackageVersion   VARCHAR2,
    pPackageBuildDate VARCHAR2,
    pPackageAuthor    VARCHAR2
);
|
||||
|
||||
/**
 * @name CHECK_PACKAGE_CHANGES
 * @desc Checks if the package source code has changed since the last tracking.
 *       Compares the current hash with the last recorded hash in A_PACKAGE_VERSION_TRACKING.
 *       Returns a detailed change detection report.
 * @param pPackageOwner - Schema owner of the package
 * @param pPackageName  - Name of the package
 * @example SELECT ENV_MANAGER.CHECK_PACKAGE_CHANGES('CT_MRDS', 'FILE_MANAGER') FROM DUAL;
 * @ex_rslt WARNING: Package changed without version update!
 *          Last Version: 3.2.0
 *          Current Hash (SPEC): A7B3C5D9...
 *          Last Hash (SPEC): B8C4D6E0...
 *          RECOMMENDATION: Update PACKAGE_VERSION and PACKAGE_BUILD_DATE
 **/
FUNCTION CHECK_PACKAGE_CHANGES(
    pPackageOwner VARCHAR2,
    pPackageName  VARCHAR2
) RETURN VARCHAR2;
|
||||
|
||||
/**
 * @name GET_PACKAGE_HASH_INFO
 * @desc Returns formatted information about the package hash and tracking history.
 *       Includes the current hash, last tracked hash, and change detection status.
 * @param pPackageOwner - Schema owner of the package
 * @param pPackageName  - Name of the package
 * @example SELECT ENV_MANAGER.GET_PACKAGE_HASH_INFO('CT_MRDS', 'FILE_MANAGER') FROM DUAL;
 * @ex_rslt Package: CT_MRDS.FILE_MANAGER
 *          Current Version: 3.2.0
 *          Current Hash (SPEC): A7B3C5D9...
 *          Last Tracked: 2025-10-22 16:30:00
 *          Status: OK - No changes detected
 **/
FUNCTION GET_PACKAGE_HASH_INFO(
    pPackageOwner VARCHAR2,
    pPackageName  VARCHAR2
) RETURN VARCHAR2;
|
||||
|
||||
END ENV_MANAGER;
|
||||
/
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,239 @@
|
||||
create or replace PACKAGE CT_MRDS.DATA_EXPORTER
AUTHID CURRENT_USER
AS
/**
 * Data Export Package: Provides comprehensive data export capabilities to various formats (CSV, Parquet)
 * with support for cloud storage integration via Oracle Cloud Infrastructure (OCI).
 * The structure of this comment is used by the GET_PACKAGE_DOCUMENTATION function
 * which returns documentation text for the confluence page (to Copy-Paste it).
 **/

    -- Package Version Information
    PACKAGE_VERSION    CONSTANT VARCHAR2(10)  := '2.9.0';
    PACKAGE_BUILD_DATE CONSTANT VARCHAR2(20)  := '2026-02-13 14:00:00';
    PACKAGE_AUTHOR     CONSTANT VARCHAR2(100) := 'Grzegorz Michalski';

    -- Version History (last 3-5 changes)
    VERSION_HISTORY CONSTANT VARCHAR2(4000) :=
        'v2.9.0 (2026-02-13): Added pProcessName parameter to EXPORT_TABLE_DATA and EXPORT_TABLE_DATA_TO_CSV_BY_DATE procedures for process tracking in A_SOURCE_FILE_RECEIVED table.' || CHR(10) ||
        'v2.8.1 (2026-02-12): FIX query in EXPORT_TABLE_DATA - removed A_LOAD_HISTORY join to ensure single file output (simple SELECT).' || CHR(10) ||
        'v2.8.0 (2026-02-12): MAJOR REFACTOR - EXPORT_TABLE_DATA now exports to single CSV file instead of partitioning by key values. Added pFileName parameter.' || CHR(10) ||
        'v2.7.5 (2026-02-11): Added pRegisterExport parameter to EXPORT_TABLE_DATA procedure. When TRUE, registers each exported CSV file in A_SOURCE_FILE_RECEIVED.' || CHR(10) ||
        'v2.7.4 (2026-02-11): ACTUAL FILENAME STORAGE - Store real filename with Oracle suffix in SOURCE_FILE_NAME instead of theoretical filename.' || CHR(10);

    -- CR+LF line break used when composing multi-line messages
    cgBL CONSTANT VARCHAR2(2) := CHR(13)||CHR(10);
    -- Scratch buffer for building log/diagnostic messages
    vgMsgTmp VARCHAR2(32000);

    ---------------------------------------------------------------------------------------------------------------------------
    -- TYPE DEFINITIONS FOR PARTITION HANDLING
    ---------------------------------------------------------------------------------------------------------------------------

    /**
     * Record type for year/month partition information
     **/
    TYPE partition_rec IS RECORD (
        year  VARCHAR2(4),
        month VARCHAR2(2)
    );

    /**
     * Table type for a collection of partition records
     **/
    TYPE partition_tab IS TABLE OF partition_rec;

    ---------------------------------------------------------------------------------------------------------------------------
    -- INTERNAL PARALLEL PROCESSING CALLBACK
    ---------------------------------------------------------------------------------------------------------------------------

    /**
     * @name EXPORT_PARTITION_PARALLEL
     * @desc Internal callback procedure for DBMS_PARALLEL_EXECUTE.
     *       Processes a single partition (year/month) chunk in a parallel task.
     *       Called by the DBMS_PARALLEL_EXECUTE framework for each chunk.
     *       This procedure is PUBLIC because DBMS_PARALLEL_EXECUTE requires it,
     *       but should NOT be called directly by external code.
     * @param pStartId - Chunk start ID (CHUNK_ID from A_PARALLEL_EXPORT_CHUNKS table)
     * @param pEndId   - Chunk end ID (same as pStartId for single-row chunks)
     **/
    PROCEDURE EXPORT_PARTITION_PARALLEL (
        pStartId IN NUMBER,
        pEndId   IN NUMBER
    );

    ---------------------------------------------------------------------------------------------------------------------------
    -- MAIN EXPORT PROCEDURES
    ---------------------------------------------------------------------------------------------------------------------------

    /**
     * @name EXPORT_TABLE_DATA
     * @desc Wrapper procedure for DBMS_CLOUD.EXPORT_DATA.
     *       Exports data into a single CSV file on OCI infrastructure.
     *       pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
     *       Supports a template table for column order and per-column date formatting.
     *       When pRegisterExport=TRUE, the successfully exported file is registered in:
     *       - CT_MRDS.A_SOURCE_FILE_RECEIVED (tracks file location, size, checksum, and metadata)
     * @param pFileName          - Optional filename (e.g., 'export.csv'). NULL = auto-generate from table name
     * @param pTemplateTableName - Optional template table (SCHEMA.TABLE or TABLE) for:
     *                             - Column order control (template defines CSV structure)
     *                             - Per-column date formatting via FILE_MANAGER.GET_DATE_FORMAT
     *                             - NULL = use source table columns in natural order
     * @param pMaxFileSize       - Maximum file size in bytes (default 104857600 = 100MB, min 10MB, max 1GB)
     * @param pRegisterExport    - When TRUE, registers the exported CSV file in the A_SOURCE_FILE_RECEIVED table
     * @param pProcessName       - Process name stored in PROCESS_NAME column (default 'DATA_EXPORTER')
     * @example
     *    begin
     *        DATA_EXPORTER.EXPORT_TABLE_DATA(
     *            pSchemaName        => 'CT_MRDS',
     *            pTableName         => 'MY_TABLE',
     *            pKeyColumnName     => 'A_ETL_LOAD_SET_KEY_FK',
     *            pBucketArea        => 'DATA',
     *            pFolderName        => 'csv_exports',
     *            pFileName          => 'my_export.csv',                -- Optional
     *            pTemplateTableName => 'CT_ET_TEMPLATES.MY_TEMPLATE',  -- Optional
     *            pMaxFileSize       => 104857600,                      -- Optional, default 100MB
     *            pRegisterExport    => TRUE                            -- Optional, default FALSE
     *        );
     *    end;
     **/
    PROCEDURE EXPORT_TABLE_DATA (
        pSchemaName        IN VARCHAR2,
        pTableName         IN VARCHAR2,
        pKeyColumnName     IN VARCHAR2,
        pBucketArea        IN VARCHAR2,
        pFolderName        IN VARCHAR2,
        pFileName          IN VARCHAR2 default NULL,
        pTemplateTableName IN VARCHAR2 default NULL,
        pMaxFileSize       IN NUMBER   default 104857600,
        pRegisterExport    IN BOOLEAN  default FALSE,
        pProcessName       IN VARCHAR2 default 'DATA_EXPORTER',
        pCredentialName    IN VARCHAR2 default ENV_MANAGER.gvCredentialName
    );

    /**
     * @name EXPORT_TABLE_DATA_BY_DATE
     * @desc Wrapper procedure for DBMS_CLOUD.EXPORT_DATA.
     *       Exports data into PARQUET files on OCI infrastructure.
     *       Each YEAR_MONTH pair goes to a separate file (implicit partitioning).
     *       Allows specifying a custom column list or uses T.* if pColumnList is NULL.
     *       Validates that all columns in pColumnList exist in the target table.
     *       Automatically adds a 'T.' prefix to column names in pColumnList.
     *       Supports parallel partition processing via pParallelDegree parameter (default 1, range 1-16).
     *       pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
     *       NOTE(review): recent export scripts pass pJobClass => 'high' to this API, but this
     *       spec declares no pJobClass parameter — confirm against the deployed package version.
     * @example
     *    begin
     *        DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
     *            pSchemaName     => 'CT_MRDS',
     *            pTableName      => 'MY_TABLE',
     *            pKeyColumnName  => 'A_ETL_LOAD_SET_KEY_FK',
     *            pBucketArea     => 'DATA',
     *            pFolderName     => 'parquet_exports',
     *            pColumnList     => 'COLUMN1, COLUMN2, COLUMN3',  -- Optional
     *            pMinDate        => DATE '2024-01-01',
     *            pMaxDate        => SYSDATE,
     *            pParallelDegree => 8                             -- Optional, default 1, range 1-16
     *        );
     *    end;
     **/
    PROCEDURE EXPORT_TABLE_DATA_BY_DATE (
        pSchemaName        IN VARCHAR2,
        pTableName         IN VARCHAR2,
        pKeyColumnName     IN VARCHAR2,
        pBucketArea        IN VARCHAR2,
        pFolderName        IN VARCHAR2,
        pColumnList        IN VARCHAR2 default NULL,
        pMinDate           IN DATE     default DATE '1900-01-01',
        pMaxDate           IN DATE     default SYSDATE,
        pParallelDegree    IN NUMBER   default 1,
        pTemplateTableName IN VARCHAR2 default NULL,
        pCredentialName    IN VARCHAR2 default ENV_MANAGER.gvCredentialName
    );

    /**
     * @name EXPORT_TABLE_DATA_TO_CSV_BY_DATE
     * @desc Exports data to separate CSV files partitioned by year and month.
     *       Creates one CSV file for each year/month combination found in the data.
     *       Uses the same date filtering mechanism with CT_ODS.A_LOAD_HISTORY as EXPORT_TABLE_DATA_BY_DATE,
     *       but exports to CSV format instead of Parquet.
     *       Supports parallel partition processing via pParallelDegree parameter (1-16).
     *       File naming pattern: {pFileName}_YYYYMM.csv or {TABLENAME}_YYYYMM.csv (if pFileName is NULL)
     *       pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
     *       When pRegisterExport=TRUE, successfully exported files are registered in:
     *       - CT_MRDS.A_SOURCE_FILE_RECEIVED (tracks file location, size, checksum, and metadata)
     * @param pProcessName - Process name stored in PROCESS_NAME column (default 'DATA_EXPORTER')
     * @example
     *    begin
     *        -- With custom filename
     *        DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
     *            pSchemaName     => 'CT_MRDS',
     *            pTableName      => 'MY_TABLE',
     *            pKeyColumnName  => 'A_ETL_LOAD_SET_KEY_FK',
     *            pBucketArea     => 'DATA',
     *            pFolderName     => 'exports',
     *            pFileName       => 'my_export.csv',
     *            pMinDate        => DATE '2024-01-01',
     *            pMaxDate        => SYSDATE,
     *            pParallelDegree => 8,    -- Optional, default 1, range 1-16
     *            pRegisterExport => TRUE  -- Optional, default FALSE, registers to A_SOURCE_FILE_RECEIVED
     *        );
     *
     *        -- With auto-generated filename (based on table name only)
     *        DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
     *            pSchemaName     => 'OU_TOP',
     *            pTableName      => 'AGGREGATED_ALLOTMENT',
     *            pKeyColumnName  => 'A_ETL_LOAD_SET_KEY_FK',
     *            pBucketArea     => 'ARCHIVE',
     *            pFolderName     => 'exports',
     *            pMinDate        => DATE '2025-09-01',
     *            pMaxDate        => DATE '2025-09-17',
     *            pRegisterExport => TRUE  -- Registers each export to A_SOURCE_FILE_RECEIVED table
     *        );
     *        -- This will create files like: AGGREGATED_ALLOTMENT_202509.csv, etc.
     *    end;
     **/
    PROCEDURE EXPORT_TABLE_DATA_TO_CSV_BY_DATE (
        pSchemaName        IN VARCHAR2,
        pTableName         IN VARCHAR2,
        pKeyColumnName     IN VARCHAR2,
        pBucketArea        IN VARCHAR2,
        pFolderName        IN VARCHAR2,
        pFileName          IN VARCHAR2 DEFAULT NULL,
        pColumnList        IN VARCHAR2 default NULL,
        pMinDate           IN DATE     default DATE '1900-01-01',
        pMaxDate           IN DATE     default SYSDATE,
        pParallelDegree    IN NUMBER   default 1,
        pTemplateTableName IN VARCHAR2 default NULL,
        pMaxFileSize       IN NUMBER   default 104857600,
        pRegisterExport    IN BOOLEAN  default FALSE,
        pProcessName       IN VARCHAR2 default 'DATA_EXPORTER',
        pCredentialName    IN VARCHAR2 default ENV_MANAGER.gvCredentialName
    );

    ---------------------------------------------------------------------------------------------------------------------------
    -- VERSION MANAGEMENT FUNCTIONS
    ---------------------------------------------------------------------------------------------------------------------------

    /**
     * Returns the current package version number
     * return: Version string in format X.Y.Z (e.g., '2.1.0')
     **/
    FUNCTION GET_VERSION RETURN VARCHAR2;

    /**
     * Returns comprehensive build information including version, date, and author
     * return: Formatted string with complete build details
     **/
    FUNCTION GET_BUILD_INFO RETURN VARCHAR2;

    /**
     * Returns the version history with recent changes
     * return: Multi-line string with version history
     **/
    FUNCTION GET_VERSION_HISTORY RETURN VARCHAR2;

END;

/
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,625 @@
|
||||
create or replace PACKAGE CT_MRDS.ENV_MANAGER
|
||||
AUTHID CURRENT_USER
|
||||
AS
|
||||
/**
|
||||
* General comment for package: Please put comments for functions and procedures as shown in below example.
|
||||
* It is a standard.
|
||||
* The structure of comment is used by GET_PACKAGE_DOCUMENTATION function
|
||||
* which returns documentation text for confluence page (to Copy-Paste it).
|
||||
**/
|
||||
|
||||
-- Example comment:
|
||||
/**
|
||||
* @name EX_PROCEDURE_NAME
|
||||
* @desc Procedure description
|
||||
* @example select ENV_MANAGER.EX_PROCEDURE_NAME(pParameter => 129) from dual;
|
||||
* @ex_rslt Example Result
|
||||
**/
|
||||
|
||||
-- Package Version Information (Semantic Versioning: MAJOR.MINOR.PATCH)
|
||||
PACKAGE_VERSION CONSTANT VARCHAR2(10) := '3.2.0';
|
||||
PACKAGE_BUILD_DATE CONSTANT VARCHAR2(20) := '2025-12-20 10:00:00';
|
||||
PACKAGE_AUTHOR CONSTANT VARCHAR2(100) := 'Grzegorz Michalski';
|
||||
|
||||
-- Version History (Latest changes first)
|
||||
VERSION_HISTORY CONSTANT VARCHAR2(4000) :=
|
||||
'3.2.0 (2025-12-20): Added error codes for parallel execution support (CODE_INVALID_PARALLEL_DEGREE -20110, CODE_PARALLEL_EXECUTION_FAILED -20111)' || CHR(13)||CHR(10) ||
|
||||
'3.1.0 (2025-10-22): Added package hash tracking and automatic change detection system (SHA256 hashing)' || CHR(13)||CHR(10) ||
|
||||
'3.0.0 (2025-10-22): Added package versioning system with centralized version management functions' || CHR(13)||CHR(10) ||
|
||||
'2.1.0 (2025-10-15): Added ANALYZE_VALIDATION_ERRORS function for comprehensive CSV validation analysis' || CHR(13)||CHR(10) ||
|
||||
'2.0.0 (2025-10-01): Added LOG_PROCESS_ERROR procedure with enhanced error diagnostics and stack traces' || CHR(13)||CHR(10) ||
|
||||
'1.5.0 (2025-09-20): Added console logging support with gvConsoleLoggingEnabled configuration' || CHR(13)||CHR(10) ||
|
||||
'1.0.0 (2025-09-01): Initial release with error management and configuration system';
|
||||
|
||||
TYPE Error_Record IS RECORD (
|
||||
code PLS_INTEGER,
|
||||
message VARCHAR2(4000)
|
||||
);
|
||||
|
||||
TYPE tErrorList IS TABLE OF Error_Record INDEX BY PLS_INTEGER;
|
||||
|
||||
Errors tErrorList;
|
||||
|
||||
|
||||
guid VARCHAR2(32);
|
||||
gvEnv VARCHAR2(200);
|
||||
gvUsername VARCHAR2(128);
|
||||
gvOsuser VARCHAR2(128);
|
||||
gvMachine VARCHAR2(64);
|
||||
gvModule VARCHAR2(64);
|
||||
|
||||
gvNameSpace VARCHAR2(200);
|
||||
gvRegion VARCHAR2(200);
|
||||
gvDataBucketName VARCHAR2(200);
|
||||
gvInboxBucketName VARCHAR2(200);
|
||||
gvArchiveBucketName VARCHAR2(200);
|
||||
gvDataBucketUri VARCHAR2(200);
|
||||
gvInboxBucketUri VARCHAR2(200);
|
||||
gvArchiveBucketUri VARCHAR2(200);
|
||||
gvCredentialName VARCHAR2(200);
|
||||
|
||||
-- Overwritten by variable "LoggingEnabled" in A_FILE_MANAGER_CONFIG.CONFIG_VARIABLE table
|
||||
gvLoggingEnabled VARCHAR2(3) := 'ON'; -- 'ON' or 'OFF'
|
||||
|
||||
-- Overwritten by variable "MinLogLevel" in A_FILE_MANAGER_CONFIG.CONFIG_VARIABLE table
|
||||
-- Possible values: DEBUG ,INFO ,WARNING ,ERROR
|
||||
gvMinLogLevel VARCHAR2(10) := 'DEBUG';
|
||||
|
||||
-- Overwritten by variable "DefaultDateFormat" in A_FILE_MANAGER_CONFIG.CONFIG_VARIABLE table
|
||||
gvDefaultDateFormat VARCHAR2(200) := 'DD/MM/YYYY HH24:MI:SS';
|
||||
|
||||
-- Overwritten by variable "ConsoleLoggingEnabled" in A_FILE_MANAGER_CONFIG.CONFIG_VARIABLE table
|
||||
gvConsoleLoggingEnabled VARCHAR2(3) := 'ON'; -- 'ON' or 'OFF'
|
||||
|
||||
cgBL CONSTANT VARCHAR2(2) := CHR(13)||CHR(10);
|
||||
|
||||
vgSourceFileConfigKey PLS_INTEGER;
|
||||
|
||||
vgMsgTmp VARCHAR2(32000);
|
||||
--Exceptions
|
||||
ERR_EMPTY_FILEURI_AND_RECKEY EXCEPTION;
|
||||
CODE_EMPTY_FILEURI_AND_RECKEY CONSTANT PLS_INTEGER := -20001;
|
||||
MSG_EMPTY_FILEURI_AND_RECKEY VARCHAR2(4000) := 'Either pFileUri or pSourceFileReceivedKey must be not null';
|
||||
PRAGMA EXCEPTION_INIT( ERR_EMPTY_FILEURI_AND_RECKEY
|
||||
,CODE_EMPTY_FILEURI_AND_RECKEY);
|
||||
|
||||
|
||||
ERR_NO_CONFIG_MATCH_FOR_FILEURI EXCEPTION;
|
||||
CODE_NO_CONFIG_MATCH_FOR_FILEURI CONSTANT PLS_INTEGER := -20002;
|
||||
MSG_NO_CONFIG_MATCH_FOR_FILEURI VARCHAR2(4000) := 'No match for source file in A_SOURCE_FILE_CONFIG table'
|
||||
||cgBL||' The file provided in parameter: pFileUri does not have '
|
||||
||cgBL||' coresponding configuration in A_SOURCE_FILE_CONFIG table';
|
||||
PRAGMA EXCEPTION_INIT( ERR_NO_CONFIG_MATCH_FOR_FILEURI
|
||||
,CODE_NO_CONFIG_MATCH_FOR_FILEURI);
|
||||
|
||||
ERR_MULTIPLE_MATCH_FOR_SRCFILE EXCEPTION;
|
||||
CODE_MULTIPLE_MATCH_FOR_SRCFILE CONSTANT PLS_INTEGER := -20003;
|
||||
MSG_MULTIPLE_MATCH_FOR_SRCFILE VARCHAR2(4000) := 'Multiple match for source file in A_SOURCE_FILE_CONFIG table';
|
||||
PRAGMA EXCEPTION_INIT( ERR_MULTIPLE_MATCH_FOR_SRCFILE
|
||||
,CODE_MULTIPLE_MATCH_FOR_SRCFILE);
|
||||
|
||||
ERR_MISSING_COLUMN_DATE_FORMAT EXCEPTION;
|
||||
CODE_MISSING_COLUMN_DATE_FORMAT CONSTANT PLS_INTEGER := -20004;
|
||||
MSG_MISSING_COLUMN_DATE_FORMAT VARCHAR2(4000) := 'Missing entry in config table: A_COLUMN_DATE_FORMAT primary key(TEMPLATE_TABLE_NAME, COLUMN_NAME)'
|
||||
||cgBL||' Remember: each column which data_type IN (''DATE'', ''TIMESTAMP'')'
|
||||
||cgBL||' should have DateFormat specified in A_COLUMN_DATE_FORMAT table '
|
||||
||cgBL||' for example: ''YYYY-MM-DD''';
|
||||
PRAGMA EXCEPTION_INIT( ERR_MISSING_COLUMN_DATE_FORMAT
|
||||
,CODE_MISSING_COLUMN_DATE_FORMAT);
|
||||
|
||||
ERR_MULTIPLE_COLUMN_DATE_FORMAT EXCEPTION;
|
||||
CODE_MULTIPLE_COLUMN_DATE_FORMAT CONSTANT PLS_INTEGER := -20005;
|
||||
MSG_MULTIPLE_COLUMN_DATE_FORMAT VARCHAR2(4000) := 'Multiple records for date format in A_COLUMN_DATE_FORMAT table'
|
||||
||cgBL||' There should be only one format specified for each DAT/TIMESTAMP column';
|
||||
PRAGMA EXCEPTION_INIT( ERR_MULTIPLE_COLUMN_DATE_FORMAT
|
||||
,CODE_MULTIPLE_COLUMN_DATE_FORMAT);
|
||||
|
||||
|
||||
ERR_DIDNT_GET_LOAD_OPERATION_ID EXCEPTION;
|
||||
CODE_DIDNT_GET_LOAD_OPERATION_ID CONSTANT PLS_INTEGER := -20006;
|
||||
MSG_DIDNT_GET_LOAD_OPERATION_ID VARCHAR2(4000) := 'Didnt get load operation id from external table validation';
|
||||
PRAGMA EXCEPTION_INIT( ERR_DIDNT_GET_LOAD_OPERATION_ID
|
||||
,CODE_DIDNT_GET_LOAD_OPERATION_ID);
|
||||
|
||||
ERR_NO_CONFIG_FOR_RECEIVED_FILE EXCEPTION;
|
||||
CODE_NO_CONFIG_FOR_RECEIVED_FILE CONSTANT PLS_INTEGER := -20007;
|
||||
MSG_NO_CONFIG_FOR_RECEIVED_FILE VARCHAR2(4000) := 'No match for received source file in A_SOURCE_FILE_CONFIG '
|
||||
||cgBL||' or missing data in A_SOURCE_FILE_RECEIVED table for provided pSourceFileReceivedKey parameter';
|
||||
PRAGMA EXCEPTION_INIT( ERR_NO_CONFIG_FOR_RECEIVED_FILE
|
||||
,CODE_NO_CONFIG_FOR_RECEIVED_FILE);
|
||||
|
||||
ERR_MULTI_CONFIG_FOR_RECEIVED_FILE EXCEPTION;
|
||||
CODE_MULTI_CONFIG_FOR_RECEIVED_FILE CONSTANT PLS_INTEGER := -20008;
|
||||
MSG_MULTI_CONFIG_FOR_RECEIVED_FILE VARCHAR2(4000) := 'Multiple matchs for received source file in A_SOURCE_FILE_CONFIG';
|
||||
PRAGMA EXCEPTION_INIT( ERR_MULTI_CONFIG_FOR_RECEIVED_FILE
|
||||
,CODE_MULTI_CONFIG_FOR_RECEIVED_FILE);
|
||||
|
||||
ERR_FILE_NOT_FOUND_ON_CLOUD EXCEPTION;
|
||||
CODE_FILE_NOT_FOUND_ON_CLOUD CONSTANT PLS_INTEGER := -20009;
|
||||
MSG_FILE_NOT_FOUND_ON_CLOUD VARCHAR2(4000) := 'File not found on the cloud';
|
||||
PRAGMA EXCEPTION_INIT( ERR_FILE_NOT_FOUND_ON_CLOUD
|
||||
,CODE_FILE_NOT_FOUND_ON_CLOUD);
|
||||
|
||||
ERR_FILE_VALIDATION_FAILED EXCEPTION;
|
||||
CODE_FILE_VALIDATION_FAILED CONSTANT PLS_INTEGER := -20010;
|
||||
MSG_FILE_VALIDATION_FAILED VARCHAR2(4000) := 'File validation failed';
|
||||
PRAGMA EXCEPTION_INIT( ERR_FILE_VALIDATION_FAILED
|
||||
,CODE_FILE_VALIDATION_FAILED);
|
||||
|
||||
ERR_EXCESS_COLUMNS_DETECTED EXCEPTION;
|
||||
CODE_EXCESS_COLUMNS_DETECTED CONSTANT PLS_INTEGER := -20011;
|
||||
MSG_EXCESS_COLUMNS_DETECTED VARCHAR2(4000) := 'CSV file contains more columns than template allows';
|
||||
PRAGMA EXCEPTION_INIT( ERR_EXCESS_COLUMNS_DETECTED
|
||||
,CODE_EXCESS_COLUMNS_DETECTED);
|
||||
|
||||
ERR_NO_CONFIG_MATCH EXCEPTION;
|
||||
CODE_NO_CONFIG_MATCH CONSTANT PLS_INTEGER := -20012;
|
||||
MSG_NO_CONFIG_MATCH VARCHAR2(4000) := 'No match for specified parameters in A_SOURCE_FILE_CONFIG table';
|
||||
PRAGMA EXCEPTION_INIT( ERR_NO_CONFIG_MATCH
|
||||
,CODE_NO_CONFIG_MATCH);
|
||||
|
||||
ERR_UNKNOWN_PREFIX EXCEPTION;
|
||||
CODE_UNKNOWN_PREFIX CONSTANT PLS_INTEGER := -20013;
|
||||
MSG_UNKNOWN_PREFIX VARCHAR2(4000) := 'Unknown prefix';
|
||||
PRAGMA EXCEPTION_INIT( ERR_UNKNOWN_PREFIX
|
||||
,CODE_UNKNOWN_PREFIX);
|
||||
|
||||
ERR_TABLE_NOT_EXISTS EXCEPTION;
|
||||
CODE_TABLE_NOT_EXISTS CONSTANT PLS_INTEGER := -20014;
|
||||
MSG_TABLE_NOT_EXISTS VARCHAR2(4000) := 'Table does not exist';
|
||||
PRAGMA EXCEPTION_INIT( ERR_TABLE_NOT_EXISTS
|
||||
,CODE_TABLE_NOT_EXISTS);
|
||||
|
||||
ERR_COLUMN_NOT_EXISTS EXCEPTION;
|
||||
CODE_COLUMN_NOT_EXISTS CONSTANT PLS_INTEGER := -20015;
|
||||
MSG_COLUMN_NOT_EXISTS VARCHAR2(4000) := 'Column does not exist in table';
|
||||
PRAGMA EXCEPTION_INIT( ERR_COLUMN_NOT_EXISTS
|
||||
,CODE_COLUMN_NOT_EXISTS);
|
||||
|
||||
ERR_UNSUPPORTED_DATA_TYPE EXCEPTION;
|
||||
CODE_UNSUPPORTED_DATA_TYPE CONSTANT PLS_INTEGER := -20016;
|
||||
MSG_UNSUPPORTED_DATA_TYPE VARCHAR2(4000) := 'Unsupported data type';
|
||||
PRAGMA EXCEPTION_INIT( ERR_UNSUPPORTED_DATA_TYPE
|
||||
,CODE_UNSUPPORTED_DATA_TYPE);
|
||||
|
||||
ERR_MISSING_SOURCE_KEY EXCEPTION;
|
||||
CODE_MISSING_SOURCE_KEY CONSTANT PLS_INTEGER := -20017;
|
||||
MSG_MISSING_SOURCE_KEY VARCHAR2(4000) := 'The Source was not found in parent table A_SOURCE';
|
||||
PRAGMA EXCEPTION_INIT( ERR_MISSING_SOURCE_KEY
|
||||
,CODE_MISSING_SOURCE_KEY);
|
||||
|
||||
ERR_NULL_SOURCE_FILE_CONFIG_KEY EXCEPTION;
|
||||
CODE_NULL_SOURCE_FILE_CONFIG_KEY CONSTANT PLS_INTEGER := -20018;
|
||||
MSG_NULL_SOURCE_FILE_CONFIG_KEY VARCHAR2(4000) := 'No entry in A_SOURCE_FILE_CONFIG table for specified A_SOURCE_FILE_CONFIG_KEY';
|
||||
PRAGMA EXCEPTION_INIT( ERR_NULL_SOURCE_FILE_CONFIG_KEY
|
||||
,CODE_NULL_SOURCE_FILE_CONFIG_KEY);
|
||||
|
||||
ERR_DUPLICATED_SOURCE_KEY EXCEPTION;
|
||||
CODE_DUPLICATED_SOURCE_KEY CONSTANT PLS_INTEGER := -20019;
|
||||
MSG_DUPLICATED_SOURCE_KEY VARCHAR2(4000) := 'The Source already exists in the A_SOURCE table';
|
||||
PRAGMA EXCEPTION_INIT( ERR_DUPLICATED_SOURCE_KEY
|
||||
,CODE_DUPLICATED_SOURCE_KEY);
|
||||
|
||||
ERR_MISSING_CONTAINER_CONFIG EXCEPTION;
|
||||
CODE_MISSING_CONTAINER_CONFIG CONSTANT PLS_INTEGER := -20020;
|
||||
MSG_MISSING_CONTAINER_CONFIG VARCHAR2(4000) := 'No match in A_SOURCE_FILE_CONFIG table where SOURCE_FILE_TYPE=''CONTAINER'' and specified SOURCE_FILE_ID';
|
||||
PRAGMA EXCEPTION_INIT( ERR_MISSING_CONTAINER_CONFIG
|
||||
,CODE_MISSING_CONTAINER_CONFIG);
|
||||
|
||||
ERR_MULTIPLE_CONTAINER_ENTRIES EXCEPTION;
|
||||
CODE_MULTIPLE_CONTAINER_ENTRIES CONSTANT PLS_INTEGER := -20021;
|
||||
MSG_MULTIPLE_CONTAINER_ENTRIES VARCHAR2(4000) := 'Multiple matches in A_SOURCE_FILE_CONFIG table where SOURCE_FILE_TYPE=''CONTAINER'' and specified SOURCE_FILE_ID';
|
||||
PRAGMA EXCEPTION_INIT( ERR_MULTIPLE_CONTAINER_ENTRIES
|
||||
,CODE_MULTIPLE_CONTAINER_ENTRIES);
|
||||
|
||||
ERR_WRONG_DESTINATION_PARAM EXCEPTION;
|
||||
CODE_WRONG_DESTINATION_PARAM CONSTANT PLS_INTEGER := -20022;
|
||||
MSG_WRONG_DESTINATION_PARAM VARCHAR2(4000) := 'Wrong destination parameter provided.';
|
||||
PRAGMA EXCEPTION_INIT( ERR_WRONG_DESTINATION_PARAM
|
||||
,CODE_WRONG_DESTINATION_PARAM);
|
||||
|
||||
ERR_FILE_NOT_EXISTS_ON_CLOUD EXCEPTION;
|
||||
CODE_FILE_NOT_EXISTS_ON_CLOUD CONSTANT PLS_INTEGER := -20023;
|
||||
MSG_FILE_NOT_EXISTS_ON_CLOUD VARCHAR2(4000) := 'File not exists on cloud.';
|
||||
PRAGMA EXCEPTION_INIT( ERR_FILE_NOT_EXISTS_ON_CLOUD
|
||||
,CODE_FILE_NOT_EXISTS_ON_CLOUD);
|
||||
|
||||
ERR_FILE_ALREADY_REGISTERED EXCEPTION;
|
||||
CODE_FILE_ALREADY_REGISTERED CONSTANT PLS_INTEGER := -20024;
|
||||
MSG_FILE_ALREADY_REGISTERED VARCHAR2(4000) := 'File already registered in A_SOURCE_FILE_RECEIVED table.';
|
||||
PRAGMA EXCEPTION_INIT( ERR_FILE_ALREADY_REGISTERED
|
||||
,CODE_FILE_ALREADY_REGISTERED);
|
||||
|
||||
ERR_WRONG_DATE_TIMESTAMP_FORMAT EXCEPTION;
|
||||
CODE_WRONG_DATE_TIMESTAMP_FORMAT CONSTANT PLS_INTEGER := -20025;
|
||||
MSG_WRONG_DATE_TIMESTAMP_FORMAT VARCHAR2(4000) := 'Provided DATE or TIMESTAMP format has errors (possible duplicated codes, ex: ''DD'').';
|
||||
PRAGMA EXCEPTION_INIT( ERR_WRONG_DATE_TIMESTAMP_FORMAT
|
||||
,CODE_WRONG_DATE_TIMESTAMP_FORMAT);
|
||||
|
||||
ERR_ENVIRONMENT_NOT_SET EXCEPTION;
|
||||
CODE_ENVIRONMENT_NOT_SET CONSTANT PLS_INTEGER := -20026;
|
||||
MSG_ENVIRONMENT_NOT_SET VARCHAR2(4000) := 'EnvironmentID not set'
|
||||
||cgBL||' Information about environment is needed to get proper configuration values.'
|
||||
||cgBL||' It can be set up in two different ways:'
|
||||
||cgBL||' 1. Set it on session level: execute DBMS_SESSION.SET_IDENTIFIER (client_id => ''dev'')'
|
||||
||cgBL||' 2. Set it on configuration level: Insert into CT_MRDS.A_FILE_MANAGER_CONFIG (ENVIRONMENT_ID,CONFIG_VARIABLE,CONFIG_VARIABLE_VALUE) values (''default'',''environment_id'',''dev'')'
|
||||
||cgBL||' Session level setup (1.) takes precedence over configuration level one (2.)'
|
||||
;
|
||||
PRAGMA EXCEPTION_INIT( ERR_ENVIRONMENT_NOT_SET
|
||||
,CODE_ENVIRONMENT_NOT_SET);
|
||||
|
||||
|
||||
ERR_CONFIG_VARIABLE_NOT_SET EXCEPTION;
|
||||
CODE_CONFIG_VARIABLE_NOT_SET CONSTANT PLS_INTEGER := -20027;
|
||||
MSG_CONFIG_VARIABLE_NOT_SET VARCHAR2(4000) := 'Missing configuration value in A_FILE_MANAGER_CONFIG';
|
||||
PRAGMA EXCEPTION_INIT( ERR_CONFIG_VARIABLE_NOT_SET
|
||||
,CODE_CONFIG_VARIABLE_NOT_SET);
|
||||
|
||||
ERR_NOT_INPUT_SOURCE_FILE_TYPE EXCEPTION;
|
||||
CODE_NOT_INPUT_SOURCE_FILE_TYPE CONSTANT PLS_INTEGER := -20028;
|
||||
MSG_NOT_INPUT_SOURCE_FILE_TYPE VARCHAR2(4000) := 'Archival can be executed only for A_SOURCE_FILE_CONFIG_KEY where SOURCE_FILE_TYPE=''INPUT''';
|
||||
PRAGMA EXCEPTION_INIT( ERR_NOT_INPUT_SOURCE_FILE_TYPE
|
||||
,CODE_NOT_INPUT_SOURCE_FILE_TYPE);
|
||||
|
||||
ERR_EXP_DATA_FOR_ARCH_FAILED EXCEPTION;
|
||||
CODE_EXP_DATA_FOR_ARCH_FAILED CONSTANT PLS_INTEGER := -20029;
|
||||
MSG_EXP_DATA_FOR_ARCH_FAILED VARCHAR2(4000) := 'Export data for archival failed.';
|
||||
PRAGMA EXCEPTION_INIT( ERR_EXP_DATA_FOR_ARCH_FAILED
|
||||
,CODE_EXP_DATA_FOR_ARCH_FAILED);
|
||||
|
||||
ERR_RESTORE_FILE_FROM_TRASH EXCEPTION;
|
||||
CODE_RESTORE_FILE_FROM_TRASH CONSTANT PLS_INTEGER := -20030;
|
||||
MSG_RESTORE_FILE_FROM_TRASH VARCHAR2(4000) := 'Unexpected issues occured while archival process. Restoration of exported files failed.';
|
||||
PRAGMA EXCEPTION_INIT( ERR_RESTORE_FILE_FROM_TRASH
|
||||
,CODE_RESTORE_FILE_FROM_TRASH);
|
||||
|
||||
ERR_CHANGE_STAT_TO_ARCHIVED_FAILED EXCEPTION;
|
||||
CODE_CHANGE_STAT_TO_ARCHIVED_FAILED CONSTANT PLS_INTEGER := -20031;
|
||||
MSG_CHANGE_STAT_TO_ARCHIVED_FAILED VARCHAR2(4000) := 'Failed to change file status to: ARCHIVED in A_SOURCE_FILE_RECEIVED table.';
|
||||
PRAGMA EXCEPTION_INIT( ERR_CHANGE_STAT_TO_ARCHIVED_FAILED
|
||||
,CODE_CHANGE_STAT_TO_ARCHIVED_FAILED);
|
||||
|
||||
ERR_MOVE_FILE_TO_TRASH_FAILED EXCEPTION;
|
||||
CODE_MOVE_FILE_TO_TRASH_FAILED CONSTANT PLS_INTEGER := -20032;
|
||||
MSG_MOVE_FILE_TO_TRASH_FAILED VARCHAR2(4000) := 'FAILED to move file to TRASH before DROPPING it.';
|
||||
PRAGMA EXCEPTION_INIT( ERR_MOVE_FILE_TO_TRASH_FAILED
|
||||
,CODE_MOVE_FILE_TO_TRASH_FAILED);
|
||||
|
||||
ERR_DROP_EXPORTED_FILES_FAILED EXCEPTION;
|
||||
CODE_DROP_EXPORTED_FILES_FAILED CONSTANT PLS_INTEGER := -20033;
|
||||
MSG_DROP_EXPORTED_FILES_FAILED VARCHAR2(4000) := 'FAILED to move file to TRASH before DROPPING it.';
|
||||
PRAGMA EXCEPTION_INIT( ERR_DROP_EXPORTED_FILES_FAILED
|
||||
,CODE_DROP_EXPORTED_FILES_FAILED);
|
||||
|
||||
ERR_INVALID_BUCKET_AREA EXCEPTION;
|
||||
CODE_INVALID_BUCKET_AREA CONSTANT PLS_INTEGER := -20034;
|
||||
MSG_INVALID_BUCKET_AREA VARCHAR2(4000) := 'Invalid bucket area specified. Valid values: INBOX, ODS, DATA, ARCHIVE';
|
||||
PRAGMA EXCEPTION_INIT( ERR_INVALID_BUCKET_AREA
|
||||
,CODE_INVALID_BUCKET_AREA);
|
||||
|
||||
ERR_INVALID_PARALLEL_DEGREE EXCEPTION;
|
||||
CODE_INVALID_PARALLEL_DEGREE CONSTANT PLS_INTEGER := -20110;
|
||||
MSG_INVALID_PARALLEL_DEGREE VARCHAR2(4000) := 'Invalid parallel degree parameter. Must be between 1 and 16';
|
||||
PRAGMA EXCEPTION_INIT( ERR_INVALID_PARALLEL_DEGREE
|
||||
,CODE_INVALID_PARALLEL_DEGREE);
|
||||
|
||||
ERR_PARALLEL_EXECUTION_FAILED EXCEPTION;
|
||||
CODE_PARALLEL_EXECUTION_FAILED CONSTANT PLS_INTEGER := -20111;
|
||||
MSG_PARALLEL_EXECUTION_FAILED VARCHAR2(4000) := 'Parallel execution failed';
|
||||
PRAGMA EXCEPTION_INIT( ERR_PARALLEL_EXECUTION_FAILED
|
||||
,CODE_PARALLEL_EXECUTION_FAILED);
|
||||
|
||||
ERR_UNKNOWN EXCEPTION;
|
||||
CODE_UNKNOWN CONSTANT PLS_INTEGER := -20999;
|
||||
MSG_UNKNOWN VARCHAR2(4000) := 'Unknown Error Occured';
|
||||
PRAGMA EXCEPTION_INIT( ERR_UNKNOWN
|
||||
,CODE_UNKNOWN);
|
||||
|
||||
---------------------------------------------------------------------------------------------------------------------------
|
||||
---------------------------------------------------------------------------------------------------------------------------
|
||||
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* @name LOG_PROCESS_EVENT
|
||||
* @desc Insert a new log record into A_PROCESS_LOG table.
|
||||
* Also outputs to console if gvConsoleLoggingEnabled = 'ON'.
|
||||
* Respects logging level configuration (gvMinLogLevel).
|
||||
* @example ENV_MANAGER.LOG_PROCESS_EVENT('Process completed successfully', 'INFO', 'pParam1=value1');
|
||||
* @ex_rslt Record inserted into A_PROCESS_LOG table and optionally displayed in console output
|
||||
**/
|
||||
PROCEDURE LOG_PROCESS_EVENT (
|
||||
pLogMessage VARCHAR2
|
||||
,pLogLevel VARCHAR2 DEFAULT 'ERROR'
|
||||
,pParameters VARCHAR2 DEFAULT NULL
|
||||
,pProcessName VARCHAR2 DEFAULT 'FILE_MANAGER'
|
||||
);
|
||||
|
||||
/**
|
||||
* @name LOG_PROCESS_ERROR
|
||||
* @desc Insert a detailed error record into A_PROCESS_LOG table with full stack trace, backtrace, and call stack.
|
||||
* This procedure captures comprehensive error information for debugging purposes while
|
||||
* allowing clean user-facing error messages to be raised separately.
|
||||
* @param pLogMessage - Base error message description
|
||||
* @param pParameters - Procedure parameters for context
|
||||
* @param pProcessName - Name of the calling process/package
|
||||
* @ex_rslt Record inserted into A_PROCESS_LOG table with complete error stack information
|
||||
*/
|
||||
PROCEDURE LOG_PROCESS_ERROR (
|
||||
pLogMessage VARCHAR2
|
||||
,pParameters VARCHAR2 DEFAULT NULL
|
||||
,pProcessName VARCHAR2 DEFAULT 'FILE_MANAGER'
|
||||
);
|
||||
|
||||
/**
|
||||
* @name INIT_ERRORS
|
||||
* @desc Loads data into Errors array.
|
||||
* Errors array is a list of Record(Error_Code, Error_Message) index by Error_Code.
|
||||
* Called automatically during package initialization.
|
||||
* @example Called automatically when package is first referenced
|
||||
* @ex_rslt Errors array populated with all error codes and messages
|
||||
**/
|
||||
PROCEDURE INIT_ERRORS;
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* @name GET_DEFAULT_ENV
|
||||
* @desc It returns string with name of default environment.
|
||||
* Return string is A_FILE_MANAGER_CONFIG.ENVIRONMENT_ID value.
|
||||
* @example select ENV_MANAGER.GET_DEFAULT_ENV() from dual;
|
||||
* @ex_rslt dev
|
||||
**/
|
||||
FUNCTION GET_DEFAULT_ENV
|
||||
RETURN VARCHAR2;
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* @name INIT_VARIABLES
|
||||
* @desc For specified pEnv parameter (A_FILE_MANAGER_CONFIG.ENVIRONMENT_ID)
|
||||
* Assign values to following global package variables:
|
||||
* - gvNameSpace
|
||||
* - gvRegion
|
||||
* - gvCredentialName
|
||||
* - gvInboxBucketName
|
||||
* - gvDataBucketName
|
||||
* - gvArchiveBucketName
|
||||
* - gvInboxBucketUri
|
||||
* - gvDataBucketUri
|
||||
* - gvArchiveBucketUri
|
||||
* - gvLoggingEnabled
|
||||
* - gvMinLogLevel
|
||||
* - gvDefaultDateFormat
|
||||
* - gvConsoleLoggingEnabled
|
||||
**/
|
||||
PROCEDURE INIT_VARIABLES(
|
||||
pEnv VARCHAR2
|
||||
);
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* @name GET_ERROR_MESSAGE
|
||||
* @desc It returns string with error message for specified pCode (Error_Code).
|
||||
* Error message is take from Errors Array loaded by INIT_ERRORS procedure
|
||||
* @example select ENV_MANAGER.GET_ERROR_MESSAGE(pCode => -20009) from dual;
|
||||
* @ex_rslt File not found on the cloud
|
||||
**/
|
||||
FUNCTION GET_ERROR_MESSAGE(
|
||||
pCode PLS_INTEGER
|
||||
) RETURN VARCHAR2;
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* @name GET_ERROR_STACK
|
||||
* @desc It returns string with all possible error stack info.
|
||||
* Error message is take from Errors Array loaded by INIT_ERRORS procedure
|
||||
* @example
|
||||
* select ENV_MANAGER.GET_ERROR_STACK(
|
||||
* pFormat => 'OUTPUT'
|
||||
* ,pCode => -20009
|
||||
* ,pSourceFileReceivedKey => NULL)
|
||||
* from dual
|
||||
* @ex_rslt
|
||||
* ------------------------------------------------------+
|
||||
* Error Message:
|
||||
* ORA-0000: normal, successful completion
|
||||
* -------------------------------------------------------
|
||||
* Error Stack:
|
||||
* -------------------------------------------------------
|
||||
* Error Backtrace:
|
||||
* ------------------------------------------------------+
|
||||
**/
|
||||
FUNCTION GET_ERROR_STACK(
|
||||
pFormat VARCHAR2
|
||||
,pCode PLS_INTEGER
|
||||
,pSourceFileReceivedKey CT_MRDS.A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY%TYPE DEFAULT NULL
|
||||
) RETURN VARCHAR2;
|
||||
|
||||
/**
|
||||
* @name FORMAT_PARAMETERS
|
||||
* @desc Formats parameter list for logging purposes.
|
||||
* Converts SYS.ODCIVARCHAR2LIST to formatted string with proper NULL handling.
|
||||
* @example select ENV_MANAGER.FORMAT_PARAMETERS(SYS.ODCIVARCHAR2LIST('param1=value1', 'param2=NULL')) from dual;
|
||||
* @ex_rslt param1=value1 ,
|
||||
* param2=NULL
|
||||
**/
|
||||
FUNCTION FORMAT_PARAMETERS(
|
||||
pParameterList SYS.ODCIVARCHAR2LIST
|
||||
) RETURN VARCHAR2;
|
||||
|
||||
/**
|
||||
* @name ANALYZE_VALIDATION_ERRORS
|
||||
* @desc Analyzes CSV validation errors and generates detailed diagnostic report.
|
||||
* Compares CSV structure with template table and provides specific error analysis.
|
||||
* Includes suggested solutions for common validation issues.
|
||||
* @param pValidationLogTable - Name of validation log table (e.g., VALIDATE$242_LOG)
|
||||
* @param pTemplateSchema - Schema of template table (e.g., CT_ET_TEMPLATES)
|
||||
* @param pTemplateTable - Name of template table (e.g., MOCK_PROC_TABLE)
|
||||
* @param pCsvFileUri - URI of CSV file being validated
|
||||
* @example SELECT ENV_MANAGER.ANALYZE_VALIDATION_ERRORS('VALIDATE$242_LOG', 'CT_ET_TEMPLATES', 'MOCK_PROC_TABLE', 'https://...') FROM DUAL;
|
||||
* @ex_rslt Detailed validation analysis report with column mismatches and solutions
|
||||
**/
|
||||
FUNCTION ANALYZE_VALIDATION_ERRORS(
|
||||
pValidationLogTable VARCHAR2,
|
||||
pTemplateSchema VARCHAR2,
|
||||
pTemplateTable VARCHAR2,
|
||||
pCsvFileUri VARCHAR2
|
||||
) RETURN VARCHAR2;
|
||||
|
||||
---------------------------------------------------------------------------------------------------------------------------
|
||||
-- PACKAGE VERSION MANAGEMENT FUNCTIONS
|
||||
---------------------------------------------------------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* @name GET_VERSION
|
||||
* @desc Returns the current version number of the ENV_MANAGER package.
|
||||
* Uses semantic versioning format (MAJOR.MINOR.PATCH).
|
||||
* @example SELECT ENV_MANAGER.GET_VERSION() FROM DUAL;
|
||||
* @ex_rslt 3.0.0
|
||||
**/
|
||||
FUNCTION GET_VERSION RETURN VARCHAR2;
|
||||
|
||||
/**
|
||||
* @name GET_BUILD_INFO
|
||||
* @desc Returns comprehensive build information including version, build date, and author.
|
||||
* Formatted for display in logs or monitoring systems.
|
||||
* @example SELECT ENV_MANAGER.GET_BUILD_INFO() FROM DUAL;
|
||||
* @ex_rslt Package: ENV_MANAGER
|
||||
* Version: 3.0.0
|
||||
* Build Date: 2025-10-22 16:00:00
|
||||
* Author: Grzegorz Michalski
|
||||
**/
|
||||
FUNCTION GET_BUILD_INFO RETURN VARCHAR2;
|
||||
|
||||
/**
|
||||
* @name GET_VERSION_HISTORY
|
||||
* @desc Returns complete version history with all releases and changes.
|
||||
* Shows evolution of package features over time.
|
||||
* @example SELECT ENV_MANAGER.GET_VERSION_HISTORY() FROM DUAL;
|
||||
* @ex_rslt ENV_MANAGER Version History:
|
||||
* 3.0.0 (2025-10-22): Added package versioning system...
|
||||
* 2.1.0 (2025-10-15): Added ANALYZE_VALIDATION_ERRORS function...
|
||||
**/
|
||||
FUNCTION GET_VERSION_HISTORY RETURN VARCHAR2;
|
||||
|
||||
/**
|
||||
* @name GET_PACKAGE_VERSION_INFO
|
||||
* @desc Universal function to get formatted version information for any package.
|
||||
* This centralized function is used by all packages in the system.
|
||||
* @param pPackageName - Name of the package
|
||||
* @param pVersion - Version string (MAJOR.MINOR.PATCH format)
|
||||
* @param pBuildDate - Build date timestamp
|
||||
* @param pAuthor - Package author name
|
||||
* @example SELECT ENV_MANAGER.GET_PACKAGE_VERSION_INFO('FILE_MANAGER', '2.1.0', '2025-10-22 15:00:00', 'Grzegorz Michalski') FROM DUAL;
|
||||
* @ex_rslt Package: FILE_MANAGER
|
||||
* Version: 2.1.0
|
||||
* Build Date: 2025-10-22 15:00:00
|
||||
* Author: Grzegorz Michalski
|
||||
**/
|
||||
FUNCTION GET_PACKAGE_VERSION_INFO(
|
||||
pPackageName VARCHAR2,
|
||||
pVersion VARCHAR2,
|
||||
pBuildDate VARCHAR2,
|
||||
pAuthor VARCHAR2
|
||||
) RETURN VARCHAR2;
|
||||
|
||||
/**
|
||||
* @name FORMAT_VERSION_HISTORY
|
||||
* @desc Universal function to format version history for any package.
|
||||
* Adds package name header and proper formatting.
|
||||
* @param pPackageName - Name of the package
|
||||
* @param pVersionHistory - Complete version history text
|
||||
* @example SELECT ENV_MANAGER.FORMAT_VERSION_HISTORY('FILE_MANAGER', '2.1.0 (2025-10-22): Export procedures...') FROM DUAL;
|
||||
* @ex_rslt FILE_MANAGER Version History:
|
||||
* 2.1.0 (2025-10-22): Export procedures...
|
||||
**/
|
||||
FUNCTION FORMAT_VERSION_HISTORY(
|
||||
pPackageName VARCHAR2,
|
||||
pVersionHistory VARCHAR2
|
||||
) RETURN VARCHAR2;
|
||||
|
||||
---------------------------------------------------------------------------------------------------------------------------
|
||||
-- PACKAGE HASH + CHANGE DETECTION FUNCTIONS
|
||||
---------------------------------------------------------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* @name CALCULATE_PACKAGE_HASH
|
||||
* @desc Calculates SHA256 hash of package source code from ALL_SOURCE.
|
||||
* Returns hash for both SPEC and BODY (if exists).
|
||||
* Used for automatic change detection.
|
||||
* @param pPackageOwner - Schema owner of the package
|
||||
* @param pPackageName - Name of the package
|
||||
* @param pPackageType - Type of package code ('PACKAGE' for SPEC, 'PACKAGE BODY' for BODY)
|
||||
* @example SELECT ENV_MANAGER.CALCULATE_PACKAGE_HASH('CT_MRDS', 'FILE_MANAGER', 'PACKAGE') FROM DUAL;
|
||||
* @ex_rslt A7B3C5D9E8F1234567890ABCDEF... (64-character SHA256 hash)
|
||||
**/
|
||||
FUNCTION CALCULATE_PACKAGE_HASH(
|
||||
pPackageOwner VARCHAR2,
|
||||
pPackageName VARCHAR2,
|
||||
pPackageType VARCHAR2 -- 'PACKAGE' or 'PACKAGE BODY'
|
||||
) RETURN VARCHAR2;
|
||||
|
||||
/**
|
||||
* @name TRACK_PACKAGE_VERSION
|
||||
* @desc Records package version and source code hash in A_PACKAGE_VERSION_TRACKING table.
|
||||
* Automatically detects if source code changed without version update.
|
||||
* Should be called after every package deployment.
|
||||
* @param pPackageOwner - Schema owner of the package
|
||||
* @param pPackageName - Name of the package
|
||||
* @param pPackageVersion - Current version from PACKAGE_VERSION constant
|
||||
* @param pPackageBuildDate - Build date from PACKAGE_BUILD_DATE constant
|
||||
* @param pPackageAuthor - Author from PACKAGE_AUTHOR constant
|
||||
* @example EXEC ENV_MANAGER.TRACK_PACKAGE_VERSION('CT_MRDS', 'FILE_MANAGER', '3.2.0', '2025-10-22 16:30:00', 'Grzegorz Michalski');
|
||||
* @ex_rslt Record inserted into A_PACKAGE_VERSION_TRACKING with change detection status
|
||||
**/
|
||||
PROCEDURE TRACK_PACKAGE_VERSION(
|
||||
pPackageOwner VARCHAR2,
|
||||
pPackageName VARCHAR2,
|
||||
pPackageVersion VARCHAR2,
|
||||
pPackageBuildDate VARCHAR2,
|
||||
pPackageAuthor VARCHAR2
|
||||
);
|
||||
|
||||
/**
|
||||
* @name CHECK_PACKAGE_CHANGES
|
||||
* @desc Checks if package source code has changed since last tracking.
|
||||
* Compares current hash with last recorded hash in A_PACKAGE_VERSION_TRACKING.
|
||||
* Returns detailed change detection report.
|
||||
* @param pPackageOwner - Schema owner of the package
|
||||
* @param pPackageName - Name of the package
|
||||
* @example SELECT ENV_MANAGER.CHECK_PACKAGE_CHANGES('CT_MRDS', 'FILE_MANAGER') FROM DUAL;
|
||||
* @ex_rslt WARNING: Package changed without version update!
|
||||
* Last Version: 3.2.0
|
||||
* Current Hash (SPEC): A7B3C5D9...
|
||||
* Last Hash (SPEC): B8C4D6E0...
|
||||
* RECOMMENDATION: Update PACKAGE_VERSION and PACKAGE_BUILD_DATE
|
||||
**/
|
||||
FUNCTION CHECK_PACKAGE_CHANGES(
|
||||
pPackageOwner VARCHAR2,
|
||||
pPackageName VARCHAR2
|
||||
) RETURN VARCHAR2;
|
||||
|
||||
/**
|
||||
* @name GET_PACKAGE_HASH_INFO
|
||||
* @desc Returns formatted information about package hash and tracking history.
|
||||
* Includes current hash, last tracked hash, and change detection status.
|
||||
* @param pPackageOwner - Schema owner of the package
|
||||
* @param pPackageName - Name of the package
|
||||
* @example SELECT ENV_MANAGER.GET_PACKAGE_HASH_INFO('CT_MRDS', 'FILE_MANAGER') FROM DUAL;
|
||||
* @ex_rslt Package: CT_MRDS.FILE_MANAGER
|
||||
* Current Version: 3.2.0
|
||||
* Current Hash (SPEC): A7B3C5D9...
|
||||
* Last Tracked: 2025-10-22 16:30:00
|
||||
* Status: OK - No changes detected
|
||||
**/
|
||||
FUNCTION GET_PACKAGE_HASH_INFO(
|
||||
pPackageOwner VARCHAR2,
|
||||
pPackageName VARCHAR2
|
||||
) RETURN VARCHAR2;
|
||||
|
||||
END ENV_MANAGER;
|
||||
/
|
||||
@@ -103,11 +103,13 @@ BEGIN
|
||||
pBucketArea => 'DATA',
|
||||
pFolderName => 'ODS/CSDB/CSDB_DEBT',
|
||||
pMinDate => &cutoff_date,
|
||||
pMaxDate => SYSDATE,
|
||||
pMaxDate => DATE '9999-12-31', -- Include future dates (MAX_LOAD_START can be beyond SYSDATE)
|
||||
pParallelDegree => 16,
|
||||
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_DEBT',
|
||||
pMaxFileSize => 104857600, -- 100MB in bytes (safe for parallel execution, avoids ORA-04036)
|
||||
pRegisterExport => TRUE -- Register exported files in A_SOURCE_FILE_RECEIVED with metadata (CHECKSUM, CREATED, BYTES)
|
||||
pRegisterExport => TRUE, -- Register exported files in A_SOURCE_FILE_RECEIVED with metadata (CHECKSUM, CREATED, BYTES)
|
||||
pProcessName => 'MARS-835', -- Process identifier for tracking
|
||||
pJobClass => 'high' -- Oracle Scheduler job class for resource management
|
||||
);
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_DEBT exported to DATA bucket with template column order');
|
||||
@@ -128,7 +130,8 @@ BEGIN
|
||||
pFolderName => 'ARCHIVE/CSDB/CSDB_DEBT',
|
||||
pMaxDate => &cutoff_date,
|
||||
pParallelDegree => 16,
|
||||
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_DEBT'
|
||||
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_DEBT',
|
||||
pJobClass => 'high' -- Oracle Scheduler job class for resource management
|
||||
);
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_DEBT exported to HIST bucket with template column order');
|
||||
@@ -223,11 +226,13 @@ BEGIN
|
||||
pBucketArea => 'DATA',
|
||||
pFolderName => 'ODS/CSDB/CSDB_DEBT_DAILY',
|
||||
pMinDate => &cutoff_date,
|
||||
pMaxDate => SYSDATE,
|
||||
pMaxDate => DATE '9999-12-31', -- Include future dates (MAX_LOAD_START can be beyond SYSDATE)
|
||||
pParallelDegree => 16,
|
||||
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_DEBT_DAILY',
|
||||
pMaxFileSize => 104857600, -- 100MB in bytes (safe for parallel execution, avoids ORA-04036)
|
||||
pRegisterExport => TRUE -- Register exported files in A_SOURCE_FILE_RECEIVED with metadata (CHECKSUM, CREATED, BYTES)
|
||||
pRegisterExport => TRUE, -- Register exported files in A_SOURCE_FILE_RECEIVED with metadata (CHECKSUM, CREATED, BYTES)
|
||||
pProcessName => 'MARS-835', -- Process identifier for tracking
|
||||
pJobClass => 'high' -- Oracle Scheduler job class for resource management
|
||||
);
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_DEBT_DAILY exported to DATA bucket with template column order');
|
||||
@@ -248,7 +253,8 @@ BEGIN
|
||||
pFolderName => 'ARCHIVE/CSDB/CSDB_DEBT_DAILY',
|
||||
pMaxDate => &cutoff_date,
|
||||
pParallelDegree => 16,
|
||||
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_DEBT_DAILY'
|
||||
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_DEBT_DAILY',
|
||||
pJobClass => 'high' -- Oracle Scheduler job class for resource management
|
||||
);
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_DEBT_DAILY exported to HIST bucket with template column order');
|
||||
|
||||
@@ -33,9 +33,11 @@ BEGIN
|
||||
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
|
||||
pBucketArea => 'ARCHIVE',
|
||||
pFolderName => 'ARCHIVE/CSDB/CSDB_INSTR_RAT_FULL',
|
||||
pMaxDate => SYSDATE,
|
||||
pMinDate => DATE '1900-01-01', -- Explicit start date for clarity
|
||||
pMaxDate => DATE '9999-12-31', -- Include future dates (MAX_LOAD_START can be beyond SYSDATE)
|
||||
pParallelDegree => 8,
|
||||
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_INSTR_RAT_FULL'
|
||||
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_INSTR_RAT_FULL',
|
||||
pJobClass => 'high' -- Oracle Scheduler job class for resource management
|
||||
);
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_INSTR_RAT_FULL exported to HIST bucket with template column order');
|
||||
@@ -60,9 +62,11 @@ BEGIN
|
||||
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
|
||||
pBucketArea => 'ARCHIVE',
|
||||
pFolderName => 'ARCHIVE/CSDB/CSDB_INSTR_DESC_FULL',
|
||||
pMaxDate => SYSDATE,
|
||||
pMinDate => DATE '1900-01-01', -- Explicit start date for clarity
|
||||
pMaxDate => DATE '9999-12-31', -- Include future dates (MAX_LOAD_START can be beyond SYSDATE)
|
||||
pParallelDegree => 8,
|
||||
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_INSTR_DESC_FULL'
|
||||
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_INSTR_DESC_FULL',
|
||||
pJobClass => 'high' -- Oracle Scheduler job class for resource management
|
||||
);
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_INSTR_DESC_FULL exported to HIST bucket with template column order');
|
||||
@@ -87,9 +91,11 @@ BEGIN
|
||||
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
|
||||
pBucketArea => 'ARCHIVE',
|
||||
pFolderName => 'ARCHIVE/CSDB/CSDB_ISSUER_RAT_FULL',
|
||||
pMaxDate => SYSDATE,
|
||||
pMinDate => DATE '1900-01-01', -- Explicit start date for clarity
|
||||
pMaxDate => DATE '9999-12-31', -- Include future dates (MAX_LOAD_START can be beyond SYSDATE)
|
||||
pParallelDegree => 8,
|
||||
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_ISSUER_RAT_FULL'
|
||||
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_ISSUER_RAT_FULL',
|
||||
pJobClass => 'high' -- Oracle Scheduler job class for resource management
|
||||
);
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_ISSUER_RAT_FULL exported to HIST bucket with template column order');
|
||||
@@ -114,9 +120,11 @@ BEGIN
|
||||
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
|
||||
pBucketArea => 'ARCHIVE',
|
||||
pFolderName => 'ARCHIVE/CSDB/CSDB_ISSUER_DESC_FULL',
|
||||
pMaxDate => SYSDATE,
|
||||
pMinDate => DATE '1900-01-01', -- Explicit start date for clarity
|
||||
pMaxDate => DATE '9999-12-31', -- Include future dates (MAX_LOAD_START can be beyond SYSDATE)
|
||||
pParallelDegree => 8,
|
||||
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_ISSUER_DESC_FULL'
|
||||
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_ISSUER_DESC_FULL',
|
||||
pJobClass => 'high' -- Oracle Scheduler job class for resource management
|
||||
);
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_ISSUER_DESC_FULL exported to HIST bucket with template column order');
|
||||
|
||||
@@ -0,0 +1,54 @@
|
||||
--=============================================================================================================================
|
||||
-- MARS-835 ROLLBACK: Delete File Registration Records
|
||||
--=============================================================================================================================
|
||||
-- Purpose: Delete all file registration records from A_SOURCE_FILE_RECEIVED table for MARS-835 process
|
||||
-- Author: Grzegorz Michalski
|
||||
-- Date: 2026-02-13
|
||||
-- Related: MARS-835 - CSDB Data Export Rollback
|
||||
--=============================================================================================================================
|
||||
|
||||
SET SERVEROUTPUT ON SIZE UNLIMITED
|
||||
SET TIMING ON
|
||||
|
||||
PROMPT ========================================================================
|
||||
PROMPT ROLLBACK: Deleting file registration records from A_SOURCE_FILE_RECEIVED
|
||||
PROMPT ========================================================================
|
||||
|
||||
DECLARE
|
||||
vRowCount NUMBER := 0;
|
||||
vStartTime TIMESTAMP := SYSTIMESTAMP;
|
||||
vEndTime TIMESTAMP;
|
||||
vElapsedSeconds NUMBER;
|
||||
BEGIN
|
||||
DBMS_OUTPUT.PUT_LINE('Deleting all MARS-835 file registrations from A_SOURCE_FILE_RECEIVED...');
|
||||
|
||||
-- Delete all records for MARS-835 process
|
||||
DELETE FROM CT_MRDS.A_SOURCE_FILE_RECEIVED
|
||||
WHERE PROCESS_NAME = 'MARS-835';
|
||||
|
||||
vRowCount := SQL%ROWCOUNT;
|
||||
COMMIT;
|
||||
|
||||
vEndTime := SYSTIMESTAMP;
|
||||
vElapsedSeconds := EXTRACT(SECOND FROM (vEndTime - vStartTime)) +
|
||||
EXTRACT(MINUTE FROM (vEndTime - vStartTime)) * 60 +
|
||||
EXTRACT(HOUR FROM (vEndTime - vStartTime)) * 3600;
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('========================================================================');
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: File registration records deleted');
|
||||
DBMS_OUTPUT.PUT_LINE('========================================================================');
|
||||
DBMS_OUTPUT.PUT_LINE('Records deleted: ' || vRowCount);
|
||||
DBMS_OUTPUT.PUT_LINE('Elapsed time: ' || ROUND(vElapsedSeconds, 2) || ' seconds');
|
||||
DBMS_OUTPUT.PUT_LINE('========================================================================');
|
||||
EXCEPTION
|
||||
WHEN OTHERS THEN
|
||||
ROLLBACK;
|
||||
DBMS_OUTPUT.PUT_LINE('ERROR: Failed to delete file registration records');
|
||||
DBMS_OUTPUT.PUT_LINE('Error message: ' || SQLERRM);
|
||||
RAISE;
|
||||
END;
|
||||
/
|
||||
|
||||
--=============================================================================================================================
|
||||
-- End of Script
|
||||
--=============================================================================================================================
|
||||
@@ -22,25 +22,24 @@ DECLARE
|
||||
vDataBucketUri VARCHAR2(500);
|
||||
vHistBucketUri VARCHAR2(500);
|
||||
vCredentialName VARCHAR2(100);
|
||||
vFileCount NUMBER := 0;
|
||||
BEGIN
|
||||
-- Get bucket URIs and credential
|
||||
vDataBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('DATA');
|
||||
vHistBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
|
||||
vCredentialName := CT_MRDS.ENV_MANAGER.gvCredentialName;
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('Deleting DEBT files from DATA bucket...');
|
||||
DBMS_OUTPUT.PUT_LINE('Deleting DEBT CSV files from DATA bucket...');
|
||||
DBMS_OUTPUT.PUT_LINE(' Using DBMS_CLOUD.LIST_OBJECTS to scan bucket');
|
||||
|
||||
-- Delete CSV files from DATA bucket (only files matching export pattern)
|
||||
-- Pattern matches: LEGACY_DEBT_YYYYMM.csv OR LEGACY_DEBT_YYYYMM_1_20260122T...Z.csv (Oracle timestamp)
|
||||
-- Delete CSV files for DEBT from DATA bucket using LIST_OBJECTS
|
||||
FOR rec IN (
|
||||
SELECT object_name
|
||||
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
||||
credential_name => vCredentialName,
|
||||
location_uri => vDataBucketUri || 'ODS/CSDB/CSDB_DEBT/'
|
||||
))
|
||||
WHERE object_name LIKE 'LEGACY_DEBT_%'
|
||||
AND object_name LIKE '%.csv'
|
||||
AND REGEXP_LIKE(object_name, '^LEGACY_DEBT_[0-9]{6}(_[0-9]+_[0-9]{8}T[0-9]{6,}Z)?\.csv$') -- YYYYMM or YYYYMM_1_timestamp
|
||||
WHERE object_name LIKE 'LEGACY_DEBT%'
|
||||
) LOOP
|
||||
BEGIN
|
||||
DBMS_CLOUD.DELETE_OBJECT(
|
||||
@@ -48,6 +47,7 @@ BEGIN
|
||||
object_uri => vDataBucketUri || 'ODS/CSDB/CSDB_DEBT/' || rec.object_name
|
||||
);
|
||||
DBMS_OUTPUT.PUT_LINE(' Deleted: ' || rec.object_name);
|
||||
vFileCount := vFileCount + 1;
|
||||
EXCEPTION
|
||||
WHEN OTHERS THEN
|
||||
IF SQLCODE = -20404 THEN
|
||||
@@ -58,19 +58,20 @@ BEGIN
|
||||
END;
|
||||
END LOOP;
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('Deleting DEBT files from HIST bucket...');
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: DEBT CSV files deleted from DATA bucket (' || vFileCount || ' file(s))');
|
||||
|
||||
-- Delete Parquet files from HIST bucket (only files matching export pattern)
|
||||
-- Pattern matches: YYYYMM.parquet OR YYYYMM_1_20260122T...Z.parquet (Oracle timestamp)
|
||||
DBMS_OUTPUT.PUT_LINE('Deleting DEBT Parquet files from ARCHIVE bucket...');
|
||||
DBMS_OUTPUT.PUT_LINE(' Using DBMS_CLOUD.LIST_OBJECTS (Parquet files not registered)');
|
||||
vFileCount := 0;
|
||||
|
||||
-- Delete Parquet files from ARCHIVE bucket using DBMS_CLOUD.LIST_OBJECTS
|
||||
FOR rec IN (
|
||||
SELECT object_name
|
||||
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
||||
credential_name => vCredentialName,
|
||||
location_uri => vHistBucketUri || 'ARCHIVE/CSDB/CSDB_DEBT/'
|
||||
))
|
||||
WHERE object_name LIKE '%PARTITION_YEAR=%' -- Hive-style partitioning folders
|
||||
AND object_name LIKE '%.parquet'
|
||||
AND REGEXP_LIKE(object_name, '[0-9]{6}(_[0-9]+_[0-9]{8}T[0-9]{6,}Z)?\.parquet$') -- YYYYMM or YYYYMM_1_timestamp
|
||||
WHERE object_name NOT LIKE '%/' -- Exclude directories
|
||||
) LOOP
|
||||
BEGIN
|
||||
DBMS_CLOUD.DELETE_OBJECT(
|
||||
@@ -78,6 +79,7 @@ BEGIN
|
||||
object_uri => vHistBucketUri || 'ARCHIVE/CSDB/CSDB_DEBT/' || rec.object_name
|
||||
);
|
||||
DBMS_OUTPUT.PUT_LINE(' Deleted: ' || rec.object_name);
|
||||
vFileCount := vFileCount + 1;
|
||||
EXCEPTION
|
||||
WHEN OTHERS THEN
|
||||
IF SQLCODE = -20404 THEN
|
||||
@@ -88,7 +90,11 @@ BEGIN
|
||||
END;
|
||||
END LOOP;
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: DEBT files deleted');
|
||||
IF vFileCount = 0 THEN
|
||||
DBMS_OUTPUT.PUT_LINE(' INFO: No DEBT Parquet files found to delete');
|
||||
END IF;
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: DEBT Parquet files deleted from ARCHIVE bucket (' || vFileCount || ' file(s))');
|
||||
END;
|
||||
/
|
||||
|
||||
@@ -104,25 +110,24 @@ DECLARE
|
||||
vDataBucketUri VARCHAR2(500);
|
||||
vHistBucketUri VARCHAR2(500);
|
||||
vCredentialName VARCHAR2(100);
|
||||
vFileCount NUMBER := 0;
|
||||
BEGIN
|
||||
-- Get bucket URIs and credential
|
||||
vDataBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('DATA');
|
||||
vHistBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
|
||||
vCredentialName := CT_MRDS.ENV_MANAGER.gvCredentialName;
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('Deleting DEBT_DAILY files from DATA bucket...');
|
||||
DBMS_OUTPUT.PUT_LINE('Deleting DEBT_DAILY CSV files from DATA bucket...');
|
||||
DBMS_OUTPUT.PUT_LINE(' Using DBMS_CLOUD.LIST_OBJECTS to scan bucket');
|
||||
|
||||
-- Delete CSV files from DATA bucket (only files matching export pattern)
|
||||
-- Pattern matches: LEGACY_DEBT_DAILY_YYYYMM.csv OR LEGACY_DEBT_DAILY_YYYYMM_1_timestamp.csv
|
||||
-- Delete CSV files for DEBT_DAILY from DATA bucket using LIST_OBJECTS
|
||||
FOR rec IN (
|
||||
SELECT object_name
|
||||
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
||||
credential_name => vCredentialName,
|
||||
location_uri => vDataBucketUri || 'ODS/CSDB/CSDB_DEBT_DAILY/'
|
||||
))
|
||||
WHERE object_name LIKE 'LEGACY_DEBT_DAILY_%'
|
||||
AND object_name LIKE '%.csv'
|
||||
AND REGEXP_LIKE(object_name, '^LEGACY_DEBT_DAILY_[0-9]{6}(_[0-9]+_[0-9]{8}T[0-9]{6,}Z)?\.csv$') -- YYYYMM or YYYYMM_1_timestamp
|
||||
WHERE object_name LIKE 'LEGACY_DEBT_DAILY%'
|
||||
) LOOP
|
||||
BEGIN
|
||||
DBMS_CLOUD.DELETE_OBJECT(
|
||||
@@ -130,6 +135,7 @@ BEGIN
|
||||
object_uri => vDataBucketUri || 'ODS/CSDB/CSDB_DEBT_DAILY/' || rec.object_name
|
||||
);
|
||||
DBMS_OUTPUT.PUT_LINE(' Deleted: ' || rec.object_name);
|
||||
vFileCount := vFileCount + 1;
|
||||
EXCEPTION
|
||||
WHEN OTHERS THEN
|
||||
IF SQLCODE = -20404 THEN
|
||||
@@ -140,19 +146,20 @@ BEGIN
|
||||
END;
|
||||
END LOOP;
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('Deleting DEBT_DAILY files from HIST bucket...');
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: DEBT_DAILY CSV files deleted from DATA bucket (' || vFileCount || ' file(s))');
|
||||
|
||||
-- Delete Parquet files from HIST bucket (only files matching export pattern)
|
||||
-- Pattern matches: YYYYMM.parquet OR YYYYMM_1_timestamp.parquet
|
||||
DBMS_OUTPUT.PUT_LINE('Deleting DEBT_DAILY Parquet files from ARCHIVE bucket...');
|
||||
DBMS_OUTPUT.PUT_LINE(' Using DBMS_CLOUD.LIST_OBJECTS (Parquet files not registered)');
|
||||
vFileCount := 0;
|
||||
|
||||
-- Delete Parquet files from ARCHIVE bucket using DBMS_CLOUD.LIST_OBJECTS
|
||||
FOR rec IN (
|
||||
SELECT object_name
|
||||
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
||||
credential_name => vCredentialName,
|
||||
location_uri => vHistBucketUri || 'ARCHIVE/CSDB/CSDB_DEBT_DAILY/'
|
||||
))
|
||||
WHERE object_name LIKE '%PARTITION_YEAR=%' -- Hive-style partitioning folders
|
||||
AND object_name LIKE '%.parquet'
|
||||
AND REGEXP_LIKE(object_name, '[0-9]{6}(_[0-9]+_[0-9]{8}T[0-9]{6,}Z)?\.parquet$') -- YYYYMM or YYYYMM_1_timestamp
|
||||
WHERE object_name NOT LIKE '%/' -- Exclude directories
|
||||
) LOOP
|
||||
BEGIN
|
||||
DBMS_CLOUD.DELETE_OBJECT(
|
||||
@@ -160,6 +167,7 @@ BEGIN
|
||||
object_uri => vHistBucketUri || 'ARCHIVE/CSDB/CSDB_DEBT_DAILY/' || rec.object_name
|
||||
);
|
||||
DBMS_OUTPUT.PUT_LINE(' Deleted: ' || rec.object_name);
|
||||
vFileCount := vFileCount + 1;
|
||||
EXCEPTION
|
||||
WHEN OTHERS THEN
|
||||
IF SQLCODE = -20404 THEN
|
||||
@@ -170,7 +178,11 @@ BEGIN
|
||||
END;
|
||||
END LOOP;
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: DEBT_DAILY files deleted');
|
||||
IF vFileCount = 0 THEN
|
||||
DBMS_OUTPUT.PUT_LINE(' INFO: No DEBT_DAILY Parquet files found to delete');
|
||||
END IF;
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: DEBT_DAILY Parquet files deleted from ARCHIVE bucket (' || vFileCount || ' file(s))');
|
||||
END;
|
||||
/
|
||||
|
||||
|
||||
@@ -1,165 +0,0 @@
|
||||
# MARS-835: One-Time CSDB Data Export from Operational Database to External Tables
|
||||
|
||||
## Overview
|
||||
This package performs a one-time bulk export of CSDB data from operational database tables (OU_CSDB schema) to new external tables in OCI buckets. The export uses DATA_EXPORTER v2.4.0 with per-column date format handling to move historical data to either DATA bucket (CSV format) or HIST bucket (Parquet format with Hive-style partitioning).
|
||||
|
||||
**Migration Strategy:**
|
||||
- **Split Export (2 tables)**: DEBT, DEBT_DAILY - Last 6 months → DATA (CSV), Older data → HIST (Parquet)
|
||||
- **HIST Only (4 tables)**: INSTR_RAT_FULL, INSTR_DESC_FULL, ISSUER_RAT_FULL, ISSUER_DESC_FULL - All data → HIST (Parquet)
|
||||
|
||||
**Key Transformations:**
|
||||
- Column rename: `A_ETL_LOAD_SET_FK` → `A_WORKFLOW_HISTORY_KEY` (all tables)
|
||||
- Column removal: DEBT (2 columns), DEBT_DAILY (6 columns) not required in new structure
|
||||
|
||||
## Contents
|
||||
- `install_mars835.sql` - Master installation script with SPOOL logging
|
||||
- `rollback_mars835.sql` - Master rollback script
|
||||
- `01_MARS_835_*.sql` - Individual installation scripts
|
||||
- `91_MARS_835_*.sql` - Individual rollback scripts
|
||||
- `track_package_versions.sql` - Package version tracking
|
||||
- `verify_packages_version.sql` - Package verification
|
||||
|
||||
## Prerequisites
|
||||
- Oracle Database 23ai
|
||||
- ADMIN user access (required for all MARS installations)
|
||||
- ENV_MANAGER v3.1.0+
|
||||
- Required schema privileges
|
||||
|
||||
## Installation
|
||||
|
||||
### Option 1: Master Script (Recommended)
|
||||
```powershell
|
||||
# IMPORTANT: Execute as ADMIN user for proper privilege management
|
||||
Get-Content "MARS_Packages/REL01_POST_DEACTIVATION/MARS-835/install_mars835.sql" | sql "ADMIN/Cloudpass#34@ggmichalski_high"
|
||||
|
||||
# Log file created: log/INSTALL_MARS_835_<PDB>_<timestamp>.log
|
||||
```
|
||||
|
||||
### Option 2: Individual Scripts
|
||||
```powershell
|
||||
# IMPORTANT: Execute as ADMIN user
|
||||
Get-Content "01_MARS_835_*.sql" | sql "ADMIN/Cloudpass#34@ggmichalski_high"
|
||||
Get-Content "02_MARS_835_*.sql" | sql "ADMIN/Cloudpass#34@ggmichalski_high"
|
||||
# ... etc
|
||||
```
|
||||
|
||||
## Verification
|
||||
```sql
|
||||
-- Verify package versions
|
||||
SELECT PACKAGE_NAME.GET_VERSION() FROM DUAL;
|
||||
|
||||
-- Check for errors (ADMIN user checks specific schema)
|
||||
SELECT * FROM ALL_ERRORS
|
||||
WHERE OWNER = 'CT_MRDS' -- Replace with target schema
|
||||
AND NAME = 'PACKAGE_NAME';
|
||||
|
||||
-- Verify no untracked changes
|
||||
SELECT ENV_MANAGER.CHECK_PACKAGE_CHANGES('CT_MRDS', 'PACKAGE_NAME') FROM DUAL;
|
||||
```
|
||||
|
||||
## Rollback
|
||||
```powershell
|
||||
# IMPORTANT: Execute as ADMIN user
|
||||
Get-Content "MARS_Packages/REL01_POST_DEACTIVATION/MARS-835/rollback_mars835.sql" | sql "ADMIN/Cloudpass#34@ggmichalski_high"
|
||||
|
||||
**NOTE**: Rollback for data exports is **NOT RECOMMENDED** as it would delete exported files from OCI buckets. Only use rollback if export failed and needs to be restarted.
|
||||
```
|
||||
|
||||
## Expected Changes
|
||||
|
||||
### Data Export Summary
|
||||
**6 CSDB tables exported from OU_CSDB schema:**
|
||||
|
||||
**Group 1: Split DATA + HIST (Time Critical)**
|
||||
1. **DEBT** - Last 6 months → DATA, Older → HIST
|
||||
2. **DEBT_DAILY** - Last 6 months → DATA, Older → HIST
|
||||
|
||||
**Group 2: HIST Only (Weekend Bulk)**
|
||||
3. **INSTR_RAT_FULL** - All data → HIST
|
||||
4. **INSTR_DESC_FULL** - All data → HIST
|
||||
5. **ISSUER_RAT_FULL** - All data → HIST
|
||||
6. **ISSUER_DESC_FULL** - All data → HIST
|
||||
|
||||
### Bucket Destinations (DEV environment)
|
||||
- **DATA Bucket**: `mrds_data_dev/ODS/CSDB/` (CSV format)
|
||||
- **HIST Bucket**: `mrds_hist_dev/ARCHIVE/CSDB/` (Parquet with partitioning)
|
||||
|
||||
### Column Mappings
|
||||
- **All tables**: `A_ETL_LOAD_SET_FK` renamed to `A_WORKFLOW_HISTORY_KEY`
|
||||
- **DEBT**: Removed columns: `IDIRDEPOSITORY`, `VA_BONDDURATION`
|
||||
- **DEBT_DAILY**: Removed columns: `STEPID`, `PROGRAMNAME`, `PROGRAMCEILING`, `PROGRAMSTATUS`, `ISSUERNACE21SECTOR`, `INSTRUMENTQUOTATIONBASIS`
|
||||
|
||||
## Testing
|
||||
|
||||
### Post-Export Verification
|
||||
|
||||
1. **Verify CSV files in DATA bucket** (DEBT, DEBT_DAILY - last 6 months):
|
||||
```sql
|
||||
-- Check exported files
|
||||
SELECT object_name, bytes
|
||||
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
||||
credential_name => 'DEF_CRED_ARN',
|
||||
location_uri => 'https://objectstorage.region.oraclecloud.com/n/namespace/b/mrds_data_dev/o/ODS/CSDB/'
|
||||
)) WHERE object_name LIKE '%CSDB_DEBT%';
|
||||
```
|
||||
|
||||
2. **Verify Parquet files in HIST bucket** (all 6 tables):
|
||||
```sql
|
||||
-- Check archived files with Hive partitioning
|
||||
SELECT object_name, bytes
|
||||
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
||||
credential_name => 'DEF_CRED_ARN',
|
||||
location_uri => 'https://objectstorage.region.oraclecloud.com/n/namespace/b/mrds_hist_dev/o/ARCHIVE/CSDB/'
|
||||
)) WHERE object_name LIKE '%PARTITION_YEAR=%';
|
||||
```
|
||||
|
||||
3. **Validate row counts match source tables**:
|
||||
```sql
|
||||
-- Compare counts between source and exported data
|
||||
SELECT COUNT(*) FROM OU_CSDB.DEBT;
|
||||
SELECT COUNT(*) FROM ODS.CSDB_DEBT_ODS; -- External table pointing to DATA
|
||||
SELECT COUNT(*) FROM ODS.CSDB_DEBT_ARCHIVE; -- External table pointing to HIST
|
||||
```
|
||||
|
||||
4. **Verify column mappings**:
|
||||
```sql
|
||||
-- Check A_WORKFLOW_HISTORY_KEY exists in exported data
|
||||
SELECT A_WORKFLOW_HISTORY_KEY, COUNT(*)
|
||||
FROM ODS.CSDB_DEBT_ARCHIVE
|
||||
GROUP BY A_WORKFLOW_HISTORY_KEY;
|
||||
```
|
||||
|
||||
## Known Issues
|
||||
|
||||
### Timing Constraints
|
||||
- **DATA exports (DEBT, DEBT_DAILY)**: Must execute during parallel old+new loads phase after Production deployment
|
||||
- **HIST exports (all 6 tables)**: Can run anytime, recommended for weekend bulk execution to avoid interference
|
||||
|
||||
### Environment-Specific Configuration
|
||||
- Bucket names must be adjusted for each environment:
|
||||
- DEV: `mrds_data_dev`, `mrds_hist_dev`
|
||||
- TEST: `mrds_data_test`, `mrds_hist_test`
|
||||
- PROD: `mrds_data_prod`, `mrds_hist_prod`
|
||||
|
||||
### Data Cutoff Date
|
||||
- Export scripts use 6-month cutoff date calculated as `ADD_MONTHS(SYSDATE, -6)`
|
||||
- Verify cutoff aligns with business requirements before execution
|
||||
|
||||
### One-Time Execution
|
||||
- This is a **ONE-TIME data migration** package
|
||||
- After successful execution, package should be **deactivated** (moved to REL01_POST_DEACTIVATION)
|
||||
- Do not re-run unless explicitly required for data refresh
|
||||
|
||||
## Related
|
||||
- **JIRA**: MARS-835 - CSDB Data Export to External Tables
|
||||
- **Confluence**: FILE_MANAGER package - MRDS - Technical Team
|
||||
- **Confluence**: Table Setup Guide for FILE PROCESSOR System
|
||||
- **Source Schema**: OU_CSDB (Operational Database)
|
||||
- **Target Schema**: ODS (External Tables)
|
||||
- **Migration Type**: One-time bulk export (deactivated post-execution)
|
||||
|
||||
---
|
||||
|
||||
**Author:** Grzegorz Michalski
|
||||
**Date:** 2025-12-04
|
||||
**Version:** 1.0.0
|
||||
@@ -1,207 +0,0 @@
|
||||
# MARS-835: Required External Tables for Smart Column Mapping
|
||||
|
||||
## Overview
|
||||
This document lists all external tables required for MARS-835 data exports using DATA_EXPORTER v2.4.0 with Smart Column Mapping feature.
|
||||
|
||||
**Purpose**: Smart Column Mapping ensures CSV files are generated with columns in the EXACT order expected by external tables, preventing NULL values due to Oracle's positional CSV mapping.
|
||||
|
||||
---
|
||||
|
||||
## Required External Tables
|
||||
|
||||
### Group 1: DATA Bucket (CSV Format) - **CRITICAL**
|
||||
|
||||
#### 1. ODS.CSDB_DEBT_DATA_ODS
|
||||
- **Source Table**: OU_CSDB.LEGACY_DEBT
|
||||
- **Format**: CSV
|
||||
- **Bucket**: DATA (mrds_data_dev/ODS/CSDB/CSDB_DEBT/)
|
||||
- **Key Column Mapping**: A_ETL_LOAD_SET_FK → A_WORKFLOW_HISTORY_KEY (position 2 recommended)
|
||||
- **Critical**: Must use Smart Column Mapping to avoid NULL values in A_WORKFLOW_HISTORY_KEY
|
||||
|
||||
#### 2. ODS.CSDB_DEBT_DAILY_DATA_ODS
|
||||
- **Source Table**: OU_CSDB.LEGACY_DEBT_DAILY
|
||||
- **Format**: CSV
|
||||
- **Bucket**: DATA (mrds_data_dev/ODS/CSDB/CSDB_DEBT_DAILY/)
|
||||
- **Key Column Mapping**: A_ETL_LOAD_SET_FK → A_WORKFLOW_HISTORY_KEY (position 2 recommended)
|
||||
- **Critical**: Must use Smart Column Mapping to avoid NULL values in A_WORKFLOW_HISTORY_KEY
|
||||
|
||||
---
|
||||
|
||||
### Group 2: ARCHIVE Bucket (Parquet Format) - **RECOMMENDED**
|
||||
|
||||
#### 3. ODS.CSDB_DEBT_ARCHIVE
|
||||
- **Source Table**: OU_CSDB.LEGACY_DEBT
|
||||
- **Format**: Parquet with Hive partitioning
|
||||
- **Bucket**: ARCHIVE (mrds_hist_dev/ARCHIVE/CSDB/CSDB_DEBT/)
|
||||
- **Key Column Mapping**: A_ETL_LOAD_SET_FK → A_WORKFLOW_HISTORY_KEY
|
||||
- **Note**: Parquet uses schema-based mapping (column order less critical but Smart Column Mapping ensures consistency)
|
||||
|
||||
#### 4. ODS.CSDB_DEBT_DAILY_ARCHIVE
|
||||
- **Source Table**: OU_CSDB.LEGACY_DEBT_DAILY
|
||||
- **Format**: Parquet with Hive partitioning
|
||||
- **Bucket**: ARCHIVE (mrds_hist_dev/ARCHIVE/CSDB/CSDB_DEBT_DAILY/)
|
||||
- **Key Column Mapping**: A_ETL_LOAD_SET_FK → A_WORKFLOW_HISTORY_KEY
|
||||
|
||||
#### 5. ODS.CSDB_INSTR_RAT_FULL_ARCHIVE
|
||||
- **Source Table**: OU_CSDB.LEGACY_INSTR_RAT_FULL
|
||||
- **Format**: Parquet with Hive partitioning
|
||||
- **Bucket**: ARCHIVE (mrds_hist_dev/ARCHIVE/CSDB/CSDB_INSTR_RAT_FULL/)
|
||||
- **Key Column Mapping**: A_ETL_LOAD_SET_FK → A_WORKFLOW_HISTORY_KEY
|
||||
|
||||
#### 6. ODS.CSDB_INSTR_DESC_FULL_ARCHIVE
|
||||
- **Source Table**: OU_CSDB.LEGACY_INSTR_DESC_FULL
|
||||
- **Format**: Parquet with Hive partitioning
|
||||
- **Bucket**: ARCHIVE (mrds_hist_dev/ARCHIVE/CSDB/CSDB_INSTR_DESC_FULL/)
|
||||
- **Key Column Mapping**: A_ETL_LOAD_SET_FK → A_WORKFLOW_HISTORY_KEY
|
||||
|
||||
#### 7. ODS.CSDB_ISSUER_RAT_FULL_ARCHIVE
|
||||
- **Source Table**: OU_CSDB.LEGACY_ISSUER_RAT_FULL
|
||||
- **Format**: Parquet with Hive partitioning
|
||||
- **Bucket**: ARCHIVE (mrds_hist_dev/ARCHIVE/CSDB/CSDB_ISSUER_RAT_FULL/)
|
||||
- **Key Column Mapping**: A_ETL_LOAD_SET_FK → A_WORKFLOW_HISTORY_KEY
|
||||
|
||||
#### 8. ODS.CSDB_ISSUER_DESC_FULL_ARCHIVE
|
||||
- **Source Table**: OU_CSDB.LEGACY_ISSUER_DESC_FULL
|
||||
- **Format**: Parquet with Hive partitioning
|
||||
- **Bucket**: ARCHIVE (mrds_hist_dev/ARCHIVE/CSDB/CSDB_ISSUER_DESC_FULL/)
|
||||
- **Key Column Mapping**: A_ETL_LOAD_SET_FK → A_WORKFLOW_HISTORY_KEY
|
||||
|
||||
---
|
||||
|
||||
## External Table Column Order Requirements
|
||||
|
||||
### **CRITICAL for CSV Tables** (DATA bucket):
|
||||
|
||||
All CSV external tables MUST have **A_WORKFLOW_HISTORY_KEY at position 2**:
|
||||
|
||||
```
|
||||
Position 1: A_KEY (NUMBER)
|
||||
Position 2: A_WORKFLOW_HISTORY_KEY (NUMBER) ← MUST BE HERE!
|
||||
Position 3+: Other columns in any order
|
||||
```
|
||||
|
||||
**Reason**: Oracle External Tables with CSV format use **positional mapping** (ignore header row). If source table has A_ETL_LOAD_SET_FK at position 72, but CSV puts it at position 72 while external table expects A_WORKFLOW_HISTORY_KEY at position 2, the external table will try to read position 2 (which might be a DATE column) as NUMBER → conversion fails → NULL value.
|
||||
|
||||
**Solution**: Smart Column Mapping (v2.4.0) generates CSV columns in EXTERNAL TABLE order, ensuring position 2 has the correct NUMBER value.
|
||||
|
||||
### **OPTIONAL for Parquet Tables** (ARCHIVE bucket):
|
||||
|
||||
Parquet format uses **schema-based mapping** (column names). Column order doesn't matter, but Smart Column Mapping provides consistency.
|
||||
|
||||
---
|
||||
|
||||
## Creation Script Example
|
||||
|
||||
### CSV External Table (CRITICAL - Correct Column Order)
|
||||
|
||||
```sql
|
||||
-- Example: ODS.CSDB_DEBT_DATA_ODS
|
||||
-- IMPORTANT: A_WORKFLOW_HISTORY_KEY must be at position 2!
|
||||
|
||||
BEGIN
|
||||
ODS.FILE_MANAGER_ODS.CREATE_EXTERNAL_TABLE(
|
||||
pTableName => 'CSDB_DEBT_DATA_ODS',
|
||||
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_DEBT_TEMPLATE',
|
||||
pPrefix => 'ODS/CSDB/CSDB_DEBT',
|
||||
pBucketUri => CT_MRDS.ENV_MANAGER.gvDataBucketUri,
|
||||
pFormat => 'CSV' -- Uses positional mapping!
|
||||
);
|
||||
END;
|
||||
/
|
||||
|
||||
-- Verify column order (A_WORKFLOW_HISTORY_KEY should be position 2)
|
||||
SELECT column_id, column_name, data_type
|
||||
FROM all_tab_columns
|
||||
WHERE table_name = 'CSDB_DEBT_DATA_ODS'
|
||||
AND owner = 'ODS'
|
||||
ORDER BY column_id;
|
||||
```
|
||||
|
||||
### Parquet External Table (Optional Column Order)
|
||||
|
||||
```sql
|
||||
-- Example: ODS.CSDB_DEBT_ARCHIVE
|
||||
-- Column order flexible (schema-based mapping)
|
||||
|
||||
BEGIN
|
||||
ODS.FILE_MANAGER_ODS.CREATE_EXTERNAL_TABLE(
|
||||
pTableName => 'CSDB_DEBT_ARCHIVE',
|
||||
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_DEBT_TEMPLATE',
|
||||
pPrefix => 'ARCHIVE/CSDB/CSDB_DEBT',
|
||||
pBucketUri => CT_MRDS.ENV_MANAGER.gvArchiveBucketUri,
|
||||
pFormat => 'PARQUET' -- Uses schema-based mapping
|
||||
);
|
||||
END;
|
||||
/
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Template Tables Required
|
||||
|
||||
All external tables require corresponding template tables in CT_ET_TEMPLATES schema:
|
||||
|
||||
- `CT_ET_TEMPLATES.CSDB_DEBT_TEMPLATE`
|
||||
- `CT_ET_TEMPLATES.CSDB_DEBT_DAILY_TEMPLATE`
|
||||
- `CT_ET_TEMPLATES.CSDB_INSTR_RAT_FULL_TEMPLATE`
|
||||
- `CT_ET_TEMPLATES.CSDB_INSTR_DESC_FULL_TEMPLATE`
|
||||
- `CT_ET_TEMPLATES.CSDB_ISSUER_RAT_FULL_TEMPLATE`
|
||||
- `CT_ET_TEMPLATES.CSDB_ISSUER_DESC_FULL_TEMPLATE`
|
||||
|
||||
**Note**: Template tables must be created by ADMIN or CT_ET_TEMPLATES user (MRDS_LOADER cannot create them).
|
||||
|
||||
---
|
||||
|
||||
## Verification Checklist
|
||||
|
||||
Before running MARS-835 exports:
|
||||
|
||||
- [ ] All 8 external tables exist in ODS schema
|
||||
- [ ] CSV tables (DATA bucket) have A_WORKFLOW_HISTORY_KEY at position 2
|
||||
- [ ] Template tables exist in CT_ET_TEMPLATES schema
|
||||
- [ ] MRDS_LOADER has EXECUTE privilege on ODS.FILE_MANAGER_ODS
|
||||
- [ ] ODS schema has access to CT_MRDS.ENV_MANAGER for logging
|
||||
- [ ] DATA_EXPORTER v2.4.0 deployed with Smart Column Mapping feature
|
||||
|
||||
---
|
||||
|
||||
## Testing Verification
|
||||
|
||||
After export, verify A_WORKFLOW_HISTORY_KEY is not NULL:
|
||||
|
||||
```sql
|
||||
-- CSV tables (should be 100% populated)
|
||||
SELECT 'CSDB_DEBT_DATA_ODS' AS TABLE_NAME,
|
||||
COUNT(*) AS TOTAL_ROWS,
|
||||
COUNT(A_WORKFLOW_HISTORY_KEY) AS NON_NULL_COUNT,
|
||||
ROUND(COUNT(A_WORKFLOW_HISTORY_KEY) * 100.0 / NULLIF(COUNT(*), 0), 2) AS SUCCESS_RATE_PCT
|
||||
FROM ODS.CSDB_DEBT_DATA_ODS;
|
||||
|
||||
SELECT 'CSDB_DEBT_DAILY_DATA_ODS' AS TABLE_NAME,
|
||||
COUNT(*) AS TOTAL_ROWS,
|
||||
COUNT(A_WORKFLOW_HISTORY_KEY) AS NON_NULL_COUNT,
|
||||
ROUND(COUNT(A_WORKFLOW_HISTORY_KEY) * 100.0 / NULLIF(COUNT(*), 0), 2) AS SUCCESS_RATE_PCT
|
||||
FROM ODS.CSDB_DEBT_DAILY_DATA_ODS;
|
||||
|
||||
-- Parquet tables (should also be 100% populated)
|
||||
SELECT 'CSDB_DEBT_ARCHIVE' AS TABLE_NAME,
|
||||
COUNT(*) AS TOTAL_ROWS,
|
||||
COUNT(A_WORKFLOW_HISTORY_KEY) AS NON_NULL_COUNT,
|
||||
ROUND(COUNT(A_WORKFLOW_HISTORY_KEY) * 100.0 / NULLIF(COUNT(*), 0), 2) AS SUCCESS_RATE_PCT
|
||||
FROM ODS.CSDB_DEBT_ARCHIVE;
|
||||
```
|
||||
|
||||
**Expected Result**: SUCCESS_RATE_PCT = 100.00 for all tables
|
||||
|
||||
---
|
||||
|
||||
## Related Documentation
|
||||
|
||||
- [DATA_EXPORTER v2.4.0 Smart Column Mapping Examples](../MARS-835-PREHOOK/current_version/v2.3.0/DATA_EXPORTER_v2.4.0_Smart_Column_Mapping_Examples.sql)
|
||||
- [Oracle External Tables Column Order Issue](../../confluence/additions/Oracle_External_Tables_Column_Order_Issue.md)
|
||||
- [MARS-835 README](README.md)
|
||||
|
||||
---
|
||||
|
||||
**Last Updated**: 2026-01-09
|
||||
**Author**: GitHub Copilot (MARS-835 Update)
|
||||
@@ -59,7 +59,13 @@ PROMPT =========================================================================
|
||||
|
||||
PROMPT
|
||||
PROMPT =========================================================================
|
||||
PROMPT Step 3: Verify Rollback Completed
|
||||
PROMPT Step 3: Delete File Registration Records from A_SOURCE_FILE_RECEIVED
|
||||
PROMPT =========================================================================
|
||||
@@90_MARS_835_rollback_file_registrations.sql
|
||||
|
||||
PROMPT
|
||||
PROMPT =========================================================================
|
||||
PROMPT Step 4: Verify Rollback Completed
|
||||
PROMPT =========================================================================
|
||||
@@99_MARS_835_verify_rollback.sql
|
||||
|
||||
|
||||
@@ -1,92 +0,0 @@
|
||||
-- ===================================================================
|
||||
-- Simple Package Version Tracking Script
|
||||
-- ===================================================================
|
||||
-- Purpose: Track specified Oracle package versions
|
||||
-- Author: Grzegorz Michalski
|
||||
-- Date: 2025-12-04
|
||||
-- Version: 3.1.0 - List-Based Edition
|
||||
--
|
||||
-- USAGE:
|
||||
-- 1. Edit package list below (add/remove packages as needed)
|
||||
-- 2. Include in your install/rollback script: @@track_package_versions.sql
|
||||
-- ===================================================================
|
||||
|
||||
SET SERVEROUTPUT ON;
|
||||
|
||||
DECLARE
|
||||
TYPE t_package_rec IS RECORD (
|
||||
owner VARCHAR2(50),
|
||||
name VARCHAR2(50),
|
||||
version VARCHAR2(50)
|
||||
);
|
||||
TYPE t_packages IS TABLE OF t_package_rec;
|
||||
TYPE t_string_array IS TABLE OF VARCHAR2(100);
|
||||
|
||||
-- ===================================================================
|
||||
-- PACKAGE LIST - Edit this array to specify packages to track
|
||||
-- ===================================================================
|
||||
-- Add or remove entries as needed for your MARS issue
|
||||
-- Format: 'SCHEMA.PACKAGE_NAME'
|
||||
-- ===================================================================
|
||||
vPackageList t_string_array := t_string_array(
|
||||
'CT_MRDS.FILE_MANAGER',
|
||||
'ODS.FILE_MANAGER_ODS'
|
||||
);
|
||||
-- ===================================================================
|
||||
|
||||
vPackages t_packages := t_packages();
|
||||
vVersion VARCHAR2(50);
|
||||
vCount NUMBER := 0;
|
||||
vOwner VARCHAR2(50);
|
||||
vPackageName VARCHAR2(50);
|
||||
vDotPos NUMBER;
|
||||
BEGIN
|
||||
DBMS_OUTPUT.PUT_LINE('========================================');
|
||||
DBMS_OUTPUT.PUT_LINE('Package Version Tracking');
|
||||
DBMS_OUTPUT.PUT_LINE('========================================');
|
||||
|
||||
-- Process each package in the list
|
||||
FOR i IN 1..vPackageList.COUNT LOOP
|
||||
vDotPos := INSTR(vPackageList(i), '.');
|
||||
IF vDotPos > 0 THEN
|
||||
vOwner := SUBSTR(vPackageList(i), 1, vDotPos - 1);
|
||||
vPackageName := SUBSTR(vPackageList(i), vDotPos + 1);
|
||||
|
||||
BEGIN
|
||||
EXECUTE IMMEDIATE 'SELECT ' || vPackageList(i) || '.GET_VERSION() FROM DUAL'
|
||||
INTO vVersion;
|
||||
|
||||
vPackages.EXTEND;
|
||||
vPackages(vPackages.COUNT).owner := vOwner;
|
||||
vPackages(vPackages.COUNT).name := vPackageName;
|
||||
vPackages(vPackages.COUNT).version := vVersion;
|
||||
|
||||
CT_MRDS.ENV_MANAGER.TRACK_PACKAGE_VERSION(
|
||||
pPackageOwner => vOwner,
|
||||
pPackageName => vPackageName,
|
||||
pPackageVersion => vVersion,
|
||||
pPackageBuildDate => TO_CHAR(SYSDATE, 'YYYY-MM-DD HH24:MI:SS'),
|
||||
pPackageAuthor => 'Grzegorz Michalski'
|
||||
);
|
||||
vCount := vCount + 1;
|
||||
EXCEPTION
|
||||
WHEN OTHERS THEN
|
||||
DBMS_OUTPUT.PUT_LINE('Error tracking ' || vPackageList(i) || ': ' || SQLERRM);
|
||||
END;
|
||||
END IF;
|
||||
END LOOP;
|
||||
|
||||
-- Display results
|
||||
IF vPackages.COUNT > 0 THEN
|
||||
DBMS_OUTPUT.PUT_LINE('Packages tracked: ' || vCount || ' of ' || vPackages.COUNT);
|
||||
FOR i IN 1..vPackages.COUNT LOOP
|
||||
DBMS_OUTPUT.PUT_LINE(' ' || vPackages(i).owner || '.' || vPackages(i).name ||
|
||||
' (v' || vPackages(i).version || ')');
|
||||
END LOOP;
|
||||
ELSE
|
||||
DBMS_OUTPUT.PUT_LINE('No packages found in list');
|
||||
END IF;
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('========================================');
|
||||
END;
|
||||
/
|
||||
@@ -1,62 +0,0 @@
|
||||
-- ===================================================================
|
||||
-- Universal Package Version Verification Script
|
||||
-- ===================================================================
|
||||
-- Purpose: Verify all tracked Oracle packages for code changes
|
||||
-- Author: Grzegorz Michalski
|
||||
-- Date: 2025-12-04
|
||||
-- Version: 1.0.0
|
||||
--
|
||||
-- USAGE:
|
||||
-- Include at the end of install/rollback scripts: @@verify_packages_version.sql
|
||||
--
|
||||
-- OUTPUT:
|
||||
-- - List of all tracked packages with their current status
|
||||
-- - OK: Package has not changed since last tracking
|
||||
-- - WARNING: Package code changed without version update
|
||||
-- ===================================================================
|
||||
|
||||
SET LINESIZE 200
|
||||
SET PAGESIZE 1000
|
||||
SET FEEDBACK OFF
|
||||
|
||||
PROMPT
|
||||
PROMPT ========================================
|
||||
PROMPT Package Version Verification
|
||||
PROMPT ========================================
|
||||
PROMPT
|
||||
|
||||
COLUMN PACKAGE_OWNER FORMAT A15
|
||||
COLUMN PACKAGE_NAME FORMAT A20
|
||||
COLUMN VERSION FORMAT A10
|
||||
COLUMN STATUS FORMAT A80
|
||||
|
||||
SELECT
|
||||
PACKAGE_OWNER,
|
||||
PACKAGE_NAME,
|
||||
PACKAGE_VERSION AS VERSION,
|
||||
CT_MRDS.ENV_MANAGER.CHECK_PACKAGE_CHANGES(PACKAGE_OWNER, PACKAGE_NAME) AS STATUS
|
||||
FROM (
|
||||
SELECT
|
||||
PACKAGE_OWNER,
|
||||
PACKAGE_NAME,
|
||||
PACKAGE_VERSION,
|
||||
ROW_NUMBER() OVER (PARTITION BY PACKAGE_OWNER, PACKAGE_NAME ORDER BY TRACKING_DATE DESC) AS RN
|
||||
FROM CT_MRDS.A_PACKAGE_VERSION_TRACKING
|
||||
)
|
||||
WHERE RN = 1
|
||||
ORDER BY PACKAGE_OWNER, PACKAGE_NAME;
|
||||
|
||||
PROMPT
|
||||
PROMPT ========================================
|
||||
PROMPT Verification Complete
|
||||
PROMPT ========================================
|
||||
PROMPT
|
||||
PROMPT Legend:
|
||||
PROMPT OK - Package has not changed since last tracking
|
||||
PROMPT WARNING - Package code changed without version update
|
||||
PROMPT
|
||||
PROMPT For detailed hash information, use:
|
||||
PROMPT SELECT ENV_MANAGER.GET_PACKAGE_HASH_INFO('OWNER', 'PACKAGE') FROM DUAL;
|
||||
PROMPT ========================================
|
||||
|
||||
SET FEEDBACK ON
|
||||
5
MARS_Packages/REL02_POST/MARS-956/.gitignore
vendored
Normal file
5
MARS_Packages/REL02_POST/MARS-956/.gitignore
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
# Exclude temporary folders from version control
|
||||
confluence/
|
||||
log/
|
||||
test/
|
||||
mock_data/
|
||||
@@ -1,156 +1,534 @@
|
||||
-- ===================================================================
|
||||
-- MARS-956: Export Historical C2D MPEC Data to DATA Bucket
|
||||
-- ===================================================================
|
||||
-- Purpose: One-time export of historical C2D MPEC delta data from
|
||||
-- OU_C2D operational database to DATA bucket as CSV files
|
||||
-- Method: Using DATA_EXPORTER.EXPORT_TABLE_DATA procedure
|
||||
-- Target: DATA bucket with folder structure DATA/C2D/{TABLE_NAME}
|
||||
-- Format: CSV files for complete historical data access
|
||||
-- ===================================================================
|
||||
-- =====================================================================================
|
||||
-- Script: 01_MARS_956_export_c2d_mpec_data.sql
|
||||
-- Purpose: Export C2D MPEC historical data to ODS bucket
|
||||
-- Author: Grzegorz Michalski
|
||||
-- Created: 2026-02-12
|
||||
-- MARS Issue: MARS-956
|
||||
-- Target: mrds_data_dev/ODS/C2D/
|
||||
-- =====================================================================================
|
||||
|
||||
PROMPT =========================================================================
|
||||
PROMPT MARS-956: Starting C2D MPEC Historical Data Export
|
||||
PROMPT =========================================================================
|
||||
SET SERVEROUTPUT ON SIZE UNLIMITED;
|
||||
SET TIMING ON;
|
||||
|
||||
PROMPT =====================================================================================
|
||||
PROMPT MARS-956: C2D MPEC Historical Data Export
|
||||
PROMPT =====================================================================================
|
||||
PROMPT Export Strategy:
|
||||
PROMPT - Source: OU_C2D schema tables (operational database)
|
||||
PROMPT - Target: DATA bucket as CSV files
|
||||
PROMPT - Source: OU_LEGACY_C2D schema tables (operational database)
|
||||
PROMPT - Target: ODS bucket as CSV files
|
||||
PROMPT - Method: DATA_EXPORTER.EXPORT_TABLE_DATA
|
||||
PROMPT - Structure: Must match ODS template tables
|
||||
PROMPT - Registration: Files registered in A_SOURCE_FILE_RECEIVED
|
||||
PROMPT =========================================================================
|
||||
PROMPT - Path Structure: ODS/C2D/C2D_MPEC_*/
|
||||
PROMPT =====================================================================================
|
||||
|
||||
-- Log export start
|
||||
INSERT INTO CT_MRDS.A_PROCESS_LOG (PACKAGE_NAME, PROCEDURE_NAME, EVENT_TYPE, EVENT_MESSAGE, PROCEDURE_PARAMETERS)
|
||||
INSERT INTO CT_MRDS.A_PROCESS_LOG (PROCESS_NAME, PROCEDURE_NAME, LOG_LEVEL, LOG_MESSAGE, PROCEDURE_PARAMETERS)
|
||||
VALUES ('MARS-956', 'EXPORT_C2D_MPEC_DATA', 'INFO', 'Starting historical C2D MPEC data export',
|
||||
'Tables: MPEC_ADMIN, MPEC_CONTENT, MPEC_CONTENT_CRITERION');
|
||||
COMMIT;
|
||||
|
||||
-- ===================================================================
|
||||
-- TABLE 1: OU_C2D.MPEC_ADMIN -> DATA/C2D/C2D_MPEC_ADMIN
|
||||
-- ===================================================================
|
||||
PROMPT
|
||||
PROMPT =====================================================================================
|
||||
PROMPT PRE-EXPORT CHECK: Verify Existing Files in ODS Bucket
|
||||
PROMPT =====================================================================================
|
||||
|
||||
PROMPT Exporting Table 1/3: OU_C2D.MPEC_ADMIN
|
||||
PROMPT Target: mrds_data_dev/DATA/C2D/C2D_MPEC_ADMIN
|
||||
-- Check 1: MPEC_ADMIN files
|
||||
DECLARE
|
||||
vFileCount NUMBER := 0;
|
||||
vRecordCount NUMBER := 0;
|
||||
vLocationUri VARCHAR2(1000);
|
||||
BEGIN
|
||||
-- Get bucket URI for DATA bucket
|
||||
vLocationUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('DATA') || 'ODS/C2D/C2D_MPEC_ADMIN/';
|
||||
|
||||
-- Count existing files
|
||||
SELECT COUNT(*)
|
||||
INTO vFileCount
|
||||
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
||||
credential_name => 'OCI$RESOURCE_PRINCIPAL',
|
||||
location_uri => vLocationUri
|
||||
))
|
||||
WHERE object_name NOT LIKE '%/'; -- Exclude directories
|
||||
|
||||
IF vFileCount > 0 THEN
|
||||
DBMS_OUTPUT.PUT_LINE('===============================================================================');
|
||||
DBMS_OUTPUT.PUT_LINE('PRE-EXPORT CHECK: MPEC_ADMIN files already exist in DATA bucket');
|
||||
DBMS_OUTPUT.PUT_LINE('===============================================================================');
|
||||
DBMS_OUTPUT.PUT_LINE('Location: ' || vLocationUri);
|
||||
DBMS_OUTPUT.PUT_LINE('Files found: ' || vFileCount);
|
||||
DBMS_OUTPUT.PUT_LINE('');
|
||||
|
||||
-- List existing files
|
||||
DBMS_OUTPUT.PUT_LINE('Existing files:');
|
||||
FOR rec IN (
|
||||
SELECT object_name, bytes, TO_CHAR(last_modified, 'YYYY-MM-DD HH24:MI:SS') AS modified
|
||||
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
||||
credential_name => 'OCI$RESOURCE_PRINCIPAL',
|
||||
location_uri => vLocationUri
|
||||
))
|
||||
WHERE object_name NOT LIKE '%/'
|
||||
ORDER BY object_name
|
||||
) LOOP
|
||||
DBMS_OUTPUT.PUT_LINE(' - ' || rec.object_name || ' (' || rec.bytes || ' bytes, ' || rec.modified || ')');
|
||||
END LOOP;
|
||||
|
||||
-- Count records in external table
|
||||
BEGIN
|
||||
EXECUTE IMMEDIATE 'SELECT COUNT(*) FROM ODS.C2D_MPEC_ADMIN_ODS' INTO vRecordCount;
|
||||
DBMS_OUTPUT.PUT_LINE('');
|
||||
DBMS_OUTPUT.PUT_LINE('-------------------------------------------------------------------------------');
|
||||
DBMS_OUTPUT.PUT_LINE('>>>');
|
||||
DBMS_OUTPUT.PUT_LINE('>>> Records currently readable via external table: ' || vRecordCount);
|
||||
DBMS_OUTPUT.PUT_LINE('>>>');
|
||||
DBMS_OUTPUT.PUT_LINE('-------------------------------------------------------------------------------');
|
||||
EXCEPTION
|
||||
WHEN OTHERS THEN
|
||||
DBMS_OUTPUT.PUT_LINE('');
|
||||
DBMS_OUTPUT.PUT_LINE('WARNING: Cannot count records in external table');
|
||||
DBMS_OUTPUT.PUT_LINE('Error: ' || SQLERRM);
|
||||
END;
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('===============================================================================');
|
||||
DBMS_OUTPUT.PUT_LINE('');
|
||||
ELSE
|
||||
DBMS_OUTPUT.PUT_LINE('PRE-EXPORT CHECK: No existing MPEC_ADMIN files found - bucket is clean');
|
||||
DBMS_OUTPUT.PUT_LINE('');
|
||||
END IF;
|
||||
END;
|
||||
/
|
||||
|
||||
-- Check 2: MPEC_CONTENT files
|
||||
DECLARE
|
||||
vFileCount NUMBER := 0;
|
||||
vRecordCount NUMBER := 0;
|
||||
vLocationUri VARCHAR2(1000);
|
||||
BEGIN
|
||||
vLocationUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('DATA') || 'ODS/C2D/C2D_MPEC_CONTENT/';
|
||||
|
||||
SELECT COUNT(*)
|
||||
INTO vFileCount
|
||||
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
||||
credential_name => 'OCI$RESOURCE_PRINCIPAL',
|
||||
location_uri => vLocationUri
|
||||
))
|
||||
WHERE object_name NOT LIKE '%/';
|
||||
|
||||
IF vFileCount > 0 THEN
|
||||
DBMS_OUTPUT.PUT_LINE('===============================================================================');
|
||||
DBMS_OUTPUT.PUT_LINE('PRE-EXPORT CHECK: MPEC_CONTENT files already exist in DATA bucket');
|
||||
DBMS_OUTPUT.PUT_LINE('===============================================================================');
|
||||
DBMS_OUTPUT.PUT_LINE('Location: ' || vLocationUri);
|
||||
DBMS_OUTPUT.PUT_LINE('Files found: ' || vFileCount);
|
||||
DBMS_OUTPUT.PUT_LINE('');
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('Existing files:');
|
||||
FOR rec IN (
|
||||
SELECT object_name, bytes, TO_CHAR(last_modified, 'YYYY-MM-DD HH24:MI:SS') AS modified
|
||||
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
||||
credential_name => 'OCI$RESOURCE_PRINCIPAL',
|
||||
location_uri => vLocationUri
|
||||
))
|
||||
WHERE object_name NOT LIKE '%/'
|
||||
ORDER BY object_name
|
||||
) LOOP
|
||||
DBMS_OUTPUT.PUT_LINE(' - ' || rec.object_name || ' (' || rec.bytes || ' bytes, ' || rec.modified || ')');
|
||||
END LOOP;
|
||||
|
||||
BEGIN
|
||||
EXECUTE IMMEDIATE 'SELECT COUNT(*) FROM ODS.C2D_MPEC_CONTENT_ODS' INTO vRecordCount;
|
||||
DBMS_OUTPUT.PUT_LINE('');
|
||||
DBMS_OUTPUT.PUT_LINE('-------------------------------------------------------------------------------');
|
||||
DBMS_OUTPUT.PUT_LINE('>>>');
|
||||
DBMS_OUTPUT.PUT_LINE('>>> Records currently readable via external table: ' || vRecordCount);
|
||||
DBMS_OUTPUT.PUT_LINE('>>>');
|
||||
DBMS_OUTPUT.PUT_LINE('-------------------------------------------------------------------------------');
|
||||
EXCEPTION
|
||||
WHEN OTHERS THEN
|
||||
DBMS_OUTPUT.PUT_LINE('');
|
||||
DBMS_OUTPUT.PUT_LINE('WARNING: Cannot count records in external table');
|
||||
DBMS_OUTPUT.PUT_LINE('Error: ' || SQLERRM);
|
||||
END;
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('===============================================================================');
|
||||
DBMS_OUTPUT.PUT_LINE('');
|
||||
ELSE
|
||||
DBMS_OUTPUT.PUT_LINE('PRE-EXPORT CHECK: No existing MPEC_CONTENT files found - bucket is clean');
|
||||
DBMS_OUTPUT.PUT_LINE('');
|
||||
END IF;
|
||||
END;
|
||||
/
|
||||
|
||||
-- Check 3: MPEC_CONTENT_CRITERION files
|
||||
DECLARE
|
||||
vFileCount NUMBER := 0;
|
||||
vRecordCount NUMBER := 0;
|
||||
vLocationUri VARCHAR2(1000);
|
||||
BEGIN
|
||||
vLocationUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('DATA') || 'ODS/C2D/C2D_MPEC_CONTENT_CRITERION/';
|
||||
|
||||
SELECT COUNT(*)
|
||||
INTO vFileCount
|
||||
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
||||
credential_name => 'OCI$RESOURCE_PRINCIPAL',
|
||||
location_uri => vLocationUri
|
||||
))
|
||||
WHERE object_name NOT LIKE '%/';
|
||||
|
||||
IF vFileCount > 0 THEN
|
||||
DBMS_OUTPUT.PUT_LINE('===============================================================================');
|
||||
DBMS_OUTPUT.PUT_LINE('PRE-EXPORT CHECK: MPEC_CONTENT_CRITERION files already exist in DATA bucket');
|
||||
DBMS_OUTPUT.PUT_LINE('===============================================================================');
|
||||
DBMS_OUTPUT.PUT_LINE('Location: ' || vLocationUri);
|
||||
DBMS_OUTPUT.PUT_LINE('Files found: ' || vFileCount);
|
||||
DBMS_OUTPUT.PUT_LINE('');
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('Existing files:');
|
||||
FOR rec IN (
|
||||
SELECT object_name, bytes, TO_CHAR(last_modified, 'YYYY-MM-DD HH24:MI:SS') AS modified
|
||||
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
||||
credential_name => 'OCI$RESOURCE_PRINCIPAL',
|
||||
location_uri => vLocationUri
|
||||
))
|
||||
WHERE object_name NOT LIKE '%/'
|
||||
ORDER BY object_name
|
||||
) LOOP
|
||||
DBMS_OUTPUT.PUT_LINE(' - ' || rec.object_name || ' (' || rec.bytes || ' bytes, ' || rec.modified || ')');
|
||||
END LOOP;
|
||||
|
||||
BEGIN
|
||||
EXECUTE IMMEDIATE 'SELECT COUNT(*) FROM ODS.C2D_MPEC_CONTENT_CRITERION_ODS' INTO vRecordCount;
|
||||
DBMS_OUTPUT.PUT_LINE('');
|
||||
DBMS_OUTPUT.PUT_LINE('-------------------------------------------------------------------------------');
|
||||
DBMS_OUTPUT.PUT_LINE('>>>');
|
||||
DBMS_OUTPUT.PUT_LINE('>>> Records currently readable via external table: ' || vRecordCount);
|
||||
DBMS_OUTPUT.PUT_LINE('>>>');
|
||||
DBMS_OUTPUT.PUT_LINE('-------------------------------------------------------------------------------');
|
||||
EXCEPTION
|
||||
WHEN OTHERS THEN
|
||||
DBMS_OUTPUT.PUT_LINE('');
|
||||
DBMS_OUTPUT.PUT_LINE('WARNING: Cannot count records in external table');
|
||||
DBMS_OUTPUT.PUT_LINE('Error: ' || SQLERRM);
|
||||
END;
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('===============================================================================');
|
||||
DBMS_OUTPUT.PUT_LINE('');
|
||||
ELSE
|
||||
DBMS_OUTPUT.PUT_LINE('PRE-EXPORT CHECK: No existing MPEC_CONTENT_CRITERION files found - bucket is clean');
|
||||
DBMS_OUTPUT.PUT_LINE('');
|
||||
END IF;
|
||||
END;
|
||||
/
|
||||
|
||||
PROMPT
|
||||
PROMPT =====================================================================================
|
||||
PROMPT PRE-EXPORT: Verify Source and Target Table Readiness
|
||||
PROMPT =====================================================================================
|
||||
|
||||
-- Check source table counts before export
|
||||
DECLARE
|
||||
vAdminRows NUMBER := 0;
|
||||
vContentRows NUMBER := 0;
|
||||
vCriterionRows NUMBER := 0;
|
||||
vTotalSource NUMBER := 0;
|
||||
vAdminTarget NUMBER := 0;
|
||||
vContentTarget NUMBER := 0;
|
||||
vCriterionTarget NUMBER := 0;
|
||||
vTotalTarget NUMBER := 0;
|
||||
BEGIN
|
||||
-- Source table counts
|
||||
EXECUTE IMMEDIATE 'SELECT COUNT(*) FROM OU_LEGACY_C2D.MPEC_ADMIN' INTO vAdminRows;
|
||||
EXECUTE IMMEDIATE 'SELECT COUNT(*) FROM OU_LEGACY_C2D.MPEC_CONTENT' INTO vContentRows;
|
||||
EXECUTE IMMEDIATE 'SELECT COUNT(*) FROM OU_LEGACY_C2D.MPEC_CONTENT_CRITERION' INTO vCriterionRows;
|
||||
vTotalSource := vAdminRows + vContentRows + vCriterionRows;
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('Source table record counts (pre-export):');
|
||||
DBMS_OUTPUT.PUT_LINE('- MPEC_ADMIN: ' || vAdminRows || ' records');
|
||||
DBMS_OUTPUT.PUT_LINE('- MPEC_CONTENT: ' || vContentRows || ' records');
|
||||
DBMS_OUTPUT.PUT_LINE('- MPEC_CONTENT_CRITERION: ' || vCriterionRows || ' records');
|
||||
DBMS_OUTPUT.PUT_LINE('- TOTAL SOURCE: ' || vTotalSource || ' records');
|
||||
|
||||
-- Target external table counts (current state)
|
||||
BEGIN
|
||||
EXECUTE IMMEDIATE 'SELECT COUNT(*) FROM ODS.C2D_MPEC_ADMIN_ODS' INTO vAdminTarget;
|
||||
EXCEPTION
|
||||
WHEN OTHERS THEN
|
||||
IF SQLCODE IN (-29913, -29400) OR SQLERRM LIKE '%KUP-13023%' THEN
|
||||
vAdminTarget := 0; -- Empty is expected
|
||||
ELSE
|
||||
vAdminTarget := -1; -- Error
|
||||
END IF;
|
||||
END;
|
||||
|
||||
BEGIN
|
||||
EXECUTE IMMEDIATE 'SELECT COUNT(*) FROM ODS.C2D_MPEC_CONTENT_ODS' INTO vContentTarget;
|
||||
EXCEPTION
|
||||
WHEN OTHERS THEN
|
||||
IF SQLCODE IN (-29913, -29400) OR SQLERRM LIKE '%KUP-13023%' THEN
|
||||
vContentTarget := 0;
|
||||
ELSE
|
||||
vContentTarget := -1;
|
||||
END IF;
|
||||
END;
|
||||
|
||||
BEGIN
|
||||
EXECUTE IMMEDIATE 'SELECT COUNT(*) FROM ODS.C2D_MPEC_CONTENT_CRITERION_ODS' INTO vCriterionTarget;
|
||||
EXCEPTION
|
||||
WHEN OTHERS THEN
|
||||
IF SQLCODE IN (-29913, -29400) OR SQLERRM LIKE '%KUP-13023%' THEN
|
||||
vCriterionTarget := 0;
|
||||
ELSE
|
||||
vCriterionTarget := -1;
|
||||
END IF;
|
||||
END;
|
||||
|
||||
IF vAdminTarget >= 0 AND vContentTarget >= 0 AND vCriterionTarget >= 0 THEN
|
||||
vTotalTarget := vAdminTarget + vContentTarget + vCriterionTarget;
|
||||
ELSE
|
||||
vTotalTarget := -1; -- Error state
|
||||
END IF;
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('');
|
||||
DBMS_OUTPUT.PUT_LINE('Target external table record counts (pre-export):');
|
||||
DBMS_OUTPUT.PUT_LINE('- C2D_MPEC_ADMIN_ODS: ' ||
|
||||
CASE WHEN vAdminTarget = -1 THEN 'ERROR/INACCESSIBLE' ELSE TO_CHAR(vAdminTarget) END);
|
||||
DBMS_OUTPUT.PUT_LINE('- C2D_MPEC_CONTENT_ODS: ' ||
|
||||
CASE WHEN vContentTarget = -1 THEN 'ERROR/INACCESSIBLE' ELSE TO_CHAR(vContentTarget) END);
|
||||
DBMS_OUTPUT.PUT_LINE('- C2D_MPEC_CONTENT_CRITERION_ODS: ' ||
|
||||
CASE WHEN vCriterionTarget = -1 THEN 'ERROR/INACCESSIBLE' ELSE TO_CHAR(vCriterionTarget) END);
|
||||
DBMS_OUTPUT.PUT_LINE('- TOTAL TARGET: ' ||
|
||||
CASE WHEN vTotalTarget = -1 THEN 'ERROR/INACCESSIBLE' ELSE TO_CHAR(vTotalTarget) END);
|
||||
|
||||
IF vTotalSource > 0 THEN
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: Source tables contain data - ready for export');
|
||||
ELSE
|
||||
DBMS_OUTPUT.PUT_LINE('ERROR: WARNING: No source data found');
|
||||
END IF;
|
||||
|
||||
IF vTotalTarget = 0 THEN
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: Target external tables are clean - ready for fresh export');
|
||||
ELSIF vTotalTarget > 0 THEN
|
||||
DBMS_OUTPUT.PUT_LINE('WARNING: Target tables contain ' || vTotalTarget || ' records - may be re-run');
|
||||
ELSE
|
||||
DBMS_OUTPUT.PUT_LINE('ERROR: Cannot access target external tables');
|
||||
END IF;
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('');
|
||||
DBMS_OUTPUT.PUT_LINE('Proceeding with export...');
|
||||
END;
|
||||
/
|
||||
|
||||
PROMPT
|
||||
PROMPT =====================================================================================
|
||||
PROMPT TABLE 1/3: OU_LEGACY_C2D.MPEC_ADMIN -> ODS/C2D/C2D_MPEC_ADMIN
|
||||
PROMPT =====================================================================================
|
||||
|
||||
BEGIN
|
||||
CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA(
|
||||
pSchemaName => 'OU_C2D',
|
||||
pSchemaName => 'OU_LEGACY_C2D',
|
||||
pTableName => 'MPEC_ADMIN',
|
||||
pKeyColumnName => 'A_ETL_LOAD_SET_FK', -- ETL key for data lookup
|
||||
pBucketArea => 'DATA',
|
||||
pFolderName => 'DATA/C2D/C2D_MPEC_ADMIN',
|
||||
pBucketArea => 'ODS',
|
||||
pFolderName => 'ODS/C2D/C2D_MPEC_ADMIN',
|
||||
pTemplateTableName => 'CT_ET_TEMPLATES.C2D_MPEC_ADMIN', -- Template for column order
|
||||
pRegisterExport => TRUE, -- Register files in A_SOURCE_FILE_RECEIVED
|
||||
pCredentialName => 'DEF_CRED_ARN'
|
||||
pMaxFileSize => 104857600, -- 100MB max file size
|
||||
pRegisterExport => TRUE, -- Register files in A_SOURCE_FILE_RECEIVED
|
||||
pProcessName => 'MARS-956' -- Process identifier for tracking
|
||||
);
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('✓ MPEC_ADMIN export completed successfully');
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: MPEC_ADMIN export completed successfully');
|
||||
EXCEPTION
|
||||
WHEN OTHERS THEN
|
||||
DBMS_OUTPUT.PUT_LINE('✗ MPEC_ADMIN export failed: ' || SQLERRM);
|
||||
-- Log error but continue with other tables
|
||||
INSERT INTO CT_MRDS.A_PROCESS_LOG (PACKAGE_NAME, PROCEDURE_NAME, EVENT_TYPE, EVENT_MESSAGE)
|
||||
VALUES ('MARS-956', 'EXPORT_MPEC_ADMIN', 'ERROR', 'Export failed: ' || SQLERRM);
|
||||
COMMIT;
|
||||
RAISE;
|
||||
DECLARE
|
||||
vErrorMsg VARCHAR2(4000) := SUBSTR(SQLERRM, 1, 4000);
|
||||
BEGIN
|
||||
DBMS_OUTPUT.PUT_LINE('ERROR: MPEC_ADMIN export failed: ' || vErrorMsg);
|
||||
-- Log error using proper ENV_MANAGER pattern
|
||||
INSERT INTO CT_MRDS.A_PROCESS_LOG
|
||||
(guid, Username, Osuser, Machine, Module, process_name, procedure_name, procedure_parameters, log_level, log_message)
|
||||
VALUES
|
||||
('MARS-956', USER, SYS_CONTEXT('USERENV','OS_USER'), SYS_CONTEXT('USERENV','HOST'),
|
||||
'MARS-956', 'MARS-956', 'EXPORT_MPEC_ADMIN', NULL, 'ERROR',
|
||||
'Export failed: ' || vErrorMsg);
|
||||
COMMIT;
|
||||
END;
|
||||
END;
|
||||
/
|
||||
|
||||
-- ===================================================================
|
||||
-- TABLE 2: OU_C2D.MPEC_CONTENT -> DATA/C2D/C2D_MPEC_CONTENT
|
||||
-- ===================================================================
|
||||
|
||||
PROMPT Exporting Table 2/3: OU_C2D.MPEC_CONTENT
|
||||
PROMPT Target: mrds_data_dev/DATA/C2D/C2D_MPEC_CONTENT
|
||||
PROMPT
|
||||
PROMPT =====================================================================================
|
||||
PROMPT TABLE 2/3: OU_LEGACY_C2D.MPEC_CONTENT -> ODS/C2D/C2D_MPEC_CONTENT
|
||||
PROMPT =====================================================================================
|
||||
|
||||
BEGIN
|
||||
CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA(
|
||||
pSchemaName => 'OU_C2D',
|
||||
pSchemaName => 'OU_LEGACY_C2D',
|
||||
pTableName => 'MPEC_CONTENT',
|
||||
pKeyColumnName => 'A_ETL_LOAD_SET_FK', -- ETL key for data lookup
|
||||
pBucketArea => 'DATA',
|
||||
pFolderName => 'DATA/C2D/C2D_MPEC_CONTENT',
|
||||
pTemplateTableName => 'CT_ET_TEMPLATES.C2D_MPEC_CONTENT', -- Template for column order
|
||||
pRegisterExport => TRUE, -- Register files in A_SOURCE_FILE_RECEIVED
|
||||
pCredentialName => 'DEF_CRED_ARN'
|
||||
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
|
||||
pBucketArea => 'ODS',
|
||||
pFolderName => 'ODS/C2D/C2D_MPEC_CONTENT',
|
||||
pTemplateTableName => 'CT_ET_TEMPLATES.C2D_MPEC_CONTENT',
|
||||
pMaxFileSize => 104857600, -- 100MB max file size
|
||||
pRegisterExport => TRUE,
|
||||
pProcessName => 'MARS-956' -- Process identifier for tracking
|
||||
);
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('✓ MPEC_CONTENT export completed successfully');
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: MPEC_CONTENT export completed successfully');
|
||||
EXCEPTION
|
||||
WHEN OTHERS THEN
|
||||
DBMS_OUTPUT.PUT_LINE('✗ MPEC_CONTENT export failed: ' || SQLERRM);
|
||||
-- Log error but continue with other tables
|
||||
INSERT INTO CT_MRDS.A_PROCESS_LOG (PACKAGE_NAME, PROCEDURE_NAME, EVENT_TYPE, EVENT_MESSAGE)
|
||||
VALUES ('MARS-956', 'EXPORT_MPEC_CONTENT', 'ERROR', 'Export failed: ' || SQLERRM);
|
||||
COMMIT;
|
||||
RAISE;
|
||||
DECLARE
|
||||
vErrorMsg VARCHAR2(4000) := SUBSTR(SQLERRM, 1, 4000);
|
||||
BEGIN
|
||||
DBMS_OUTPUT.PUT_LINE('ERROR: MPEC_CONTENT export failed: ' || vErrorMsg);
|
||||
-- Log error using proper ENV_MANAGER pattern
|
||||
INSERT INTO CT_MRDS.A_PROCESS_LOG
|
||||
(guid, Username, Osuser, Machine, Module, process_name, procedure_name, procedure_parameters, log_level, log_message)
|
||||
VALUES
|
||||
('MARS-956', USER, SYS_CONTEXT('USERENV','OS_USER'), SYS_CONTEXT('USERENV','HOST'),
|
||||
'MARS-956', 'MARS-956', 'EXPORT_MPEC_CONTENT', NULL, 'ERROR',
|
||||
'Export failed: ' || vErrorMsg);
|
||||
COMMIT;
|
||||
END;
|
||||
END;
|
||||
/
|
||||
|
||||
-- ===================================================================
|
||||
-- TABLE 3: OU_C2D.MPEC_CONTENT_CRITERION -> DATA/C2D/C2D_MPEC_CONTENT_CRITERION
|
||||
-- ===================================================================
|
||||
|
||||
PROMPT Exporting Table 3/3: OU_C2D.MPEC_CONTENT_CRITERION
|
||||
PROMPT Target: mrds_data_dev/DATA/C2D/C2D_MPEC_CONTENT_CRITERION
|
||||
PROMPT
|
||||
PROMPT =====================================================================================
|
||||
PROMPT TABLE 3/3: OU_LEGACY_C2D.MPEC_CONTENT_CRITERION -> ODS/C2D/C2D_MPEC_CONTENT_CRITERION
|
||||
PROMPT =====================================================================================
|
||||
|
||||
BEGIN
|
||||
CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA(
|
||||
pSchemaName => 'OU_C2D',
|
||||
pSchemaName => 'OU_LEGACY_C2D',
|
||||
pTableName => 'MPEC_CONTENT_CRITERION',
|
||||
pKeyColumnName => 'A_ETL_LOAD_SET_FK', -- ETL key for data lookup
|
||||
pBucketArea => 'DATA',
|
||||
pFolderName => 'DATA/C2D/C2D_MPEC_CONTENT_CRITERION',
|
||||
pTemplateTableName => 'CT_ET_TEMPLATES.C2D_MPEC_CONTENT_CRITERION', -- Template for column order
|
||||
pRegisterExport => TRUE, -- Register files in A_SOURCE_FILE_RECEIVED
|
||||
pCredentialName => 'DEF_CRED_ARN'
|
||||
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
|
||||
pBucketArea => 'ODS',
|
||||
pFolderName => 'ODS/C2D/C2D_MPEC_CONTENT_CRITERION',
|
||||
pTemplateTableName => 'CT_ET_TEMPLATES.C2D_MPEC_CONTENT_CRITERION',
|
||||
pMaxFileSize => 104857600, -- 100MB max file size
|
||||
pRegisterExport => TRUE,
|
||||
pProcessName => 'MARS-956' -- Process identifier for tracking
|
||||
);
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('✓ MPEC_CONTENT_CRITERION export completed successfully');
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: MPEC_CONTENT_CRITERION export completed successfully');
|
||||
EXCEPTION
|
||||
WHEN OTHERS THEN
|
||||
DBMS_OUTPUT.PUT_LINE('✗ MPEC_CONTENT_CRITERION export failed: ' || SQLERRM);
|
||||
-- Log error
|
||||
INSERT INTO CT_MRDS.A_PROCESS_LOG (PACKAGE_NAME, PROCEDURE_NAME, EVENT_TYPE, EVENT_MESSAGE)
|
||||
VALUES ('MARS-956', 'EXPORT_MPEC_CONTENT_CRITERION', 'ERROR', 'Export failed: ' || SQLERRM);
|
||||
COMMIT;
|
||||
RAISE;
|
||||
DECLARE
|
||||
vErrorMsg VARCHAR2(4000) := SUBSTR(SQLERRM, 1, 4000);
|
||||
BEGIN
|
||||
DBMS_OUTPUT.PUT_LINE('ERROR: MPEC_CONTENT_CRITERION export failed: ' || vErrorMsg);
|
||||
-- Log error using proper ENV_MANAGER pattern
|
||||
INSERT INTO CT_MRDS.A_PROCESS_LOG
|
||||
(guid, Username, Osuser, Machine, Module, process_name, procedure_name, procedure_parameters, log_level, log_message)
|
||||
VALUES
|
||||
('MARS-956', USER, SYS_CONTEXT('USERENV','OS_USER'), SYS_CONTEXT('USERENV','HOST'),
|
||||
'MARS-956', 'MARS-956', 'EXPORT_MPEC_CONTENT_CRITERION', NULL, 'ERROR',
|
||||
'Export failed: ' || vErrorMsg);
|
||||
COMMIT;
|
||||
END;
|
||||
END;
|
||||
/
|
||||
|
||||
-- ===================================================================
|
||||
-- Export Summary and Verification
|
||||
-- ===================================================================
|
||||
|
||||
PROMPT =========================================================================
|
||||
PROMPT
|
||||
PROMPT =====================================================================================
|
||||
PROMPT Export Summary - Checking Results
|
||||
PROMPT =========================================================================
|
||||
PROMPT =====================================================================================
|
||||
|
||||
-- Log completion
|
||||
INSERT INTO CT_MRDS.A_PROCESS_LOG (PACKAGE_NAME, PROCEDURE_NAME, EVENT_TYPE, EVENT_MESSAGE)
|
||||
INSERT INTO CT_MRDS.A_PROCESS_LOG (PROCESS_NAME, PROCEDURE_NAME, LOG_LEVEL, LOG_MESSAGE)
|
||||
VALUES ('MARS-956', 'EXPORT_C2D_MPEC_DATA', 'INFO', 'All C2D MPEC historical exports completed successfully');
|
||||
|
||||
PROMPT
|
||||
PROMPT =====================================================================================
|
||||
PROMPT MARS-956 C2D MPEC Export Completed Successfully!
|
||||
PROMPT =====================================================================================
|
||||
PROMPT POST-EXPORT: Source vs Target Record Count Comparison
|
||||
PROMPT =====================================================================================
|
||||
|
||||
-- Verify record counts after export
|
||||
DECLARE
|
||||
vAdminSource NUMBER := 0;
|
||||
vContentSource NUMBER := 0;
|
||||
vCriterionSource NUMBER := 0;
|
||||
vTotalSource NUMBER := 0;
|
||||
vAdminTarget NUMBER := 0;
|
||||
vContentTarget NUMBER := 0;
|
||||
vCriterionTarget NUMBER := 0;
|
||||
vTotalTarget NUMBER := 0;
|
||||
vMismatchCount NUMBER := 0;
|
||||
BEGIN
|
||||
-- Source table counts
|
||||
EXECUTE IMMEDIATE 'SELECT COUNT(*) FROM OU_LEGACY_C2D.MPEC_ADMIN' INTO vAdminSource;
|
||||
EXECUTE IMMEDIATE 'SELECT COUNT(*) FROM OU_LEGACY_C2D.MPEC_CONTENT' INTO vContentSource;
|
||||
EXECUTE IMMEDIATE 'SELECT COUNT(*) FROM OU_LEGACY_C2D.MPEC_CONTENT_CRITERION' INTO vCriterionSource;
|
||||
vTotalSource := vAdminSource + vContentSource + vCriterionSource;
|
||||
|
||||
-- Target external table counts
|
||||
BEGIN
|
||||
EXECUTE IMMEDIATE 'SELECT COUNT(*) FROM ODS.C2D_MPEC_ADMIN_ODS' INTO vAdminTarget;
|
||||
EXECUTE IMMEDIATE 'SELECT COUNT(*) FROM ODS.C2D_MPEC_CONTENT_ODS' INTO vContentTarget;
|
||||
EXECUTE IMMEDIATE 'SELECT COUNT(*) FROM ODS.C2D_MPEC_CONTENT_CRITERION_ODS' INTO vCriterionTarget;
|
||||
vTotalTarget := vAdminTarget + vContentTarget + vCriterionTarget;
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('POST-EXPORT VERIFICATION SUMMARY');
|
||||
DBMS_OUTPUT.PUT_LINE('=====================================');
|
||||
DBMS_OUTPUT.PUT_LINE('Table | Source | Target | Match');
|
||||
DBMS_OUTPUT.PUT_LINE('-----------------------------------------------------------');
|
||||
|
||||
-- MPEC_ADMIN comparison
|
||||
DBMS_OUTPUT.PUT_LINE('MPEC_ADMIN | ' ||
|
||||
RPAD(vAdminSource, 8) || ' | ' ||
|
||||
RPAD(vAdminTarget, 8) || ' | ' ||
|
||||
CASE WHEN vAdminSource = vAdminTarget THEN 'OK' ELSE 'MISMATCH' END);
|
||||
IF vAdminSource != vAdminTarget THEN vMismatchCount := vMismatchCount + 1; END IF;
|
||||
|
||||
-- MPEC_CONTENT comparison
|
||||
DBMS_OUTPUT.PUT_LINE('MPEC_CONTENT | ' ||
|
||||
RPAD(vContentSource, 8) || ' | ' ||
|
||||
RPAD(vContentTarget, 8) || ' | ' ||
|
||||
CASE WHEN vContentSource = vContentTarget THEN 'OK' ELSE 'MISMATCH' END);
|
||||
IF vContentSource != vContentTarget THEN vMismatchCount := vMismatchCount + 1; END IF;
|
||||
|
||||
-- MPEC_CONTENT_CRITERION comparison
|
||||
DBMS_OUTPUT.PUT_LINE('MPEC_CONTENT_CRITERION | ' ||
|
||||
RPAD(vCriterionSource, 8) || ' | ' ||
|
||||
RPAD(vCriterionTarget, 8) || ' | ' ||
|
||||
CASE WHEN vCriterionSource = vCriterionTarget THEN 'OK' ELSE 'MISMATCH' END);
|
||||
IF vCriterionSource != vCriterionTarget THEN vMismatchCount := vMismatchCount + 1; END IF;
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('-----------------------------------------------------------');
|
||||
DBMS_OUTPUT.PUT_LINE('TOTAL | ' ||
|
||||
RPAD(vTotalSource, 8) || ' | ' ||
|
||||
RPAD(vTotalTarget, 8) || ' | ' ||
|
||||
CASE WHEN vTotalSource = vTotalTarget THEN 'OK' ELSE 'MISMATCH' END);
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('');
|
||||
IF vMismatchCount = 0 THEN
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: All record counts match - export verified');
|
||||
ELSE
|
||||
DBMS_OUTPUT.PUT_LINE('WARNING: ' || vMismatchCount || ' table(s) have record count mismatches');
|
||||
DBMS_OUTPUT.PUT_LINE(' Please review export logs and external table access permissions');
|
||||
END IF;
|
||||
|
||||
EXCEPTION
|
||||
WHEN OTHERS THEN
|
||||
DBMS_OUTPUT.PUT_LINE('ERROR: Cannot verify target external tables post-export');
|
||||
DBMS_OUTPUT.PUT_LINE('Error: ' || SQLERRM);
|
||||
DBMS_OUTPUT.PUT_LINE('Please check external table configuration and ODS bucket access');
|
||||
END;
|
||||
END;
|
||||
/
|
||||
|
||||
-- Log export completion
|
||||
INSERT INTO CT_MRDS.A_PROCESS_LOG (PROCESS_NAME, PROCEDURE_NAME, LOG_LEVEL, LOG_MESSAGE, PROCEDURE_PARAMETERS)
|
||||
VALUES ('MARS-956', 'EXPORT_C2D_MPEC_DATA', 'INFO', 'Historical C2D MPEC data export completed',
|
||||
'Check verification scripts for detailed results');
|
||||
|
||||
COMMIT;
|
||||
|
||||
-- Display recent export activity
|
||||
PROMPT Recent Export Activity (last 30 minutes):
|
||||
SELECT TO_CHAR(EVENT_TIMESTAMP, 'YYYY-MM-DD HH24:MI:SS') AS EXPORT_TIME,
|
||||
PACKAGE_NAME,
|
||||
PROCEDURE_NAME,
|
||||
EVENT_TYPE,
|
||||
EVENT_MESSAGE
|
||||
FROM CT_MRDS.A_PROCESS_LOG
|
||||
WHERE PACKAGE_NAME = 'MARS-956'
|
||||
OR PROCEDURE_NAME LIKE '%DATA_EXPORTER%'
|
||||
AND EVENT_TIMESTAMP >= SYSTIMESTAMP - INTERVAL '30' MINUTE
|
||||
ORDER BY EVENT_TIMESTAMP DESC
|
||||
FETCH FIRST 20 ROWS ONLY;
|
||||
|
||||
PROMPT =========================================================================
|
||||
PROMPT MARS-956 Export Completed Successfully!
|
||||
PROMPT =========================================================================
|
||||
PROMPT Next Steps:
|
||||
PROMPT 1. Verify CSV files created in DATA bucket
|
||||
PROMPT 2. Check file structure matches template tables
|
||||
PROMPT 3. Validate row counts match source tables
|
||||
PROMPT 4. Confirm data available for delta queries
|
||||
PROMPT =========================================================================
|
||||
PROMPT
|
||||
PROMPT =====================================================================================
|
||||
PROMPT MARS-956 C2D MPEC Historical Data Export - COMPLETED
|
||||
PROMPT
|
||||
PROMPT Next steps:
|
||||
PROMPT 1. Run: @02_MARS_956_verify_exports.sql (verify file registration)
|
||||
PROMPT 2. Run: @03_MARS_956_verify_data_integrity.sql (full data verification)
|
||||
PROMPT =====================================================================================
|
||||
190
MARS_Packages/REL02_POST/MARS-956/02_MARS_956_verify_exports.sql
Normal file
190
MARS_Packages/REL02_POST/MARS-956/02_MARS_956_verify_exports.sql
Normal file
@@ -0,0 +1,190 @@
|
||||
-- ===================================================================
|
||||
-- MARS-956 Verify Exports: Check Export Results and File Creation
|
||||
-- ===================================================================
|
||||
-- Purpose: Verify that C2D MPEC export completed successfully
|
||||
-- Author: Grzegorz Michalski
|
||||
-- Date: 2026-02-12
|
||||
|
||||
SET SERVEROUTPUT ON SIZE UNLIMITED
|
||||
SET TIMING ON
|
||||
|
||||
PROMPT =========================================================================
|
||||
PROMPT MARS-956 Export Verification
|
||||
PROMPT =========================================================================
|
||||
|
||||
-- Check 1: Verify files were registered in A_SOURCE_FILE_RECEIVED
PROMPT Checking export file registration...
DECLARE
    vFileCount  NUMBER := 0;  -- export files registered in the last hour
    vTotalBytes NUMBER := 0;  -- combined size of those files
BEGIN
    SELECT COUNT(*), NVL(SUM(BYTES), 0)
      INTO vFileCount, vTotalBytes
      FROM CT_MRDS.A_SOURCE_FILE_RECEIVED
     WHERE RECEPTION_DATE >= SYSDATE - 1/24 -- Last hour
       -- BUG FIX: in LIKE, '_' is a single-character wildcard, so '2001_%' also
       -- matched names such as '20010...'. REGEXP_LIKE treats the underscore
       -- literally and collapses the ten OR branches (ETL keys 2001-2010,
       -- covering MPEC_ADMIN / MPEC_CONTENT / MPEC_CONTENT_CRITERION) into one
       -- anchored pattern.
       AND REGEXP_LIKE(SOURCE_FILE_NAME, '^20(0[1-9]|10)_');

    DBMS_OUTPUT.PUT_LINE('SUCCESS: Registered export files: ' || vFileCount);
    DBMS_OUTPUT.PUT_LINE('SUCCESS: Total file size: ' || ROUND(vTotalBytes/1024, 2) || ' KB');

    IF vFileCount = 0 THEN
        DBMS_OUTPUT.PUT_LINE('WARNING: No export files found in registration');
    ELSIF vFileCount < 9 THEN
        DBMS_OUTPUT.PUT_LINE('WARNING: Expected 9 files (3 tables x 3 ETL keys), found: ' || vFileCount);
    ELSE
        DBMS_OUTPUT.PUT_LINE('SUCCESS: All expected export files found');
    END IF;
END;
/
|
||||
|
||||
-- Check 2: Show recent export registrations
PROMPT Recent export file registrations:
-- List every export file registered within the past hour, newest first.
SELECT SUBSTR(SOURCE_FILE_NAME, 1, 40)          AS FILE_NAME,
       A_SOURCE_FILE_CONFIG_KEY                 AS CONFIG_KEY,
       PROCESSING_STATUS,
       ROUND(BYTES/1024, 2)                     AS SIZE_KB,
       TO_CHAR(RECEPTION_DATE, 'HH24:MI:SS')    AS TIME_EXPORTED
  FROM CT_MRDS.A_SOURCE_FILE_RECEIVED
 WHERE RECEPTION_DATE >= SYSDATE - 1/24         -- last hour only
   AND SOURCE_FILE_NAME LIKE '200%'             -- ETL keys starting with 200
 ORDER BY RECEPTION_DATE DESC;
|
||||
|
||||
-- Check 3: Verify export process logs
PROMPT Checking export process logs...
DECLARE
    vLogCount   NUMBER := 0;  -- total MARS-956 log rows in the last hour
    vErrorCount NUMBER := 0;  -- ERROR-level rows among them
BEGIN
    -- BUG FIX: SUM(...) over zero rows returns NULL, which left vErrorCount
    -- NULL when no log rows existed — the printed count was blank and the
    -- "> 0" test below silently evaluated to false-ish; NVL pins it to 0.
    SELECT COUNT(*), NVL(SUM(CASE WHEN LOG_LEVEL = 'ERROR' THEN 1 ELSE 0 END), 0)
      INTO vLogCount, vErrorCount
      FROM CT_MRDS.A_PROCESS_LOG
     WHERE PROCESS_NAME = 'MARS-956'
       AND LOG_TIMESTAMP >= SYSTIMESTAMP - INTERVAL '1' HOUR;

    DBMS_OUTPUT.PUT_LINE('SUCCESS: Process log entries: ' || vLogCount);
    DBMS_OUTPUT.PUT_LINE('SUCCESS: Error entries: ' || vErrorCount);

    IF vErrorCount > 0 THEN
        DBMS_OUTPUT.PUT_LINE('WARNING: ' || vErrorCount || ' errors found in process log');
    ELSE
        DBMS_OUTPUT.PUT_LINE('SUCCESS: No errors found in process log');
    END IF;
END;
/
|
||||
|
||||
-- Check 4: Display recent process logs
PROMPT Recent MARS-956 process logs:
-- Up to ten of the newest MARS-956 log rows from the past hour.
SELECT TO_CHAR(LOG_TIMESTAMP, 'HH24:MI:SS')  AS TIME,
       PROCEDURE_NAME,
       LOG_LEVEL,
       SUBSTR(LOG_MESSAGE, 1, 60)            AS MESSAGE
  FROM CT_MRDS.A_PROCESS_LOG
 WHERE PROCESS_NAME = 'MARS-956'
   AND LOG_TIMESTAMP >= SYSTIMESTAMP - INTERVAL '1' HOUR
 ORDER BY LOG_TIMESTAMP DESC
 FETCH FIRST 10 ROWS ONLY;
|
||||
|
||||
-- Check 5: Cloud bucket file verification (if cloud_wrapper available)
PROMPT Checking cloud bucket files...
DECLARE
    vObjectCount NUMBER := 0;   -- objects found under ODS/C2D/C2D_MPEC_*
    vCredential  VARCHAR2(100); -- DBMS_CLOUD credential name
    vBucketUri   VARCHAR2(500); -- base URI of the ODS bucket
BEGIN
    -- Resolve the bucket URI and credential from the project helpers.
    vBucketUri  := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ODS');
    vCredential := CT_MRDS.ENV_MANAGER.gvCredentialName;

    DBMS_OUTPUT.PUT_LINE('Checking ODS bucket: ' || vBucketUri);

    -- Walk the bucket listing, counting MPEC export objects.
    BEGIN
        FOR obj IN (
            SELECT object_name
              FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
                         credential_name => vCredential,
                         location_uri    => vBucketUri))
             WHERE object_name LIKE 'ODS/C2D/C2D_MPEC_%'
        ) LOOP
            vObjectCount := vObjectCount + 1;
            -- Echo only the first five names to keep the output short.
            IF vObjectCount <= 5 THEN
                DBMS_OUTPUT.PUT_LINE('- ' || obj.object_name);
            END IF;
        END LOOP;

        DBMS_OUTPUT.PUT_LINE('SUCCESS: Cloud bucket files found: ' || vObjectCount);

        IF vObjectCount = 0 THEN
            DBMS_OUTPUT.PUT_LINE('WARNING: No files found in cloud bucket');
        END IF;
    EXCEPTION
        WHEN OTHERS THEN
            -- Bucket may be unreachable (no credential / no network) — report, don't fail.
            DBMS_OUTPUT.PUT_LINE('WARNING: Cannot access cloud bucket: ' || SQLERRM);
    END;
END;
/
|
||||
|
||||
PROMPT
|
||||
PROMPT =========================================================================
|
||||
PROMPT MARS-956 Export Verification Summary
|
||||
PROMPT =========================================================================
|
||||
|
||||
-- Final verification summary: roll the earlier checks into one overall status.
DECLARE
    vFileRegCount  NUMBER := 0;   -- files registered in the last hour
    vLogErrorCount NUMBER := 0;   -- ERROR-level MARS-956 log rows in the last hour
    vOverallStatus VARCHAR2(20);  -- SUCCESS / PARTIAL SUCCESS / ISSUES DETECTED
    -- FIX: removed unused local vCloudFileCount (declared but never referenced).
BEGIN
    -- Count registered files
    SELECT COUNT(*)
      INTO vFileRegCount
      FROM CT_MRDS.A_SOURCE_FILE_RECEIVED
     WHERE RECEPTION_DATE >= SYSDATE - 1/24
       AND SOURCE_FILE_NAME LIKE '200%';

    -- Count process errors
    SELECT COUNT(*)
      INTO vLogErrorCount
      FROM CT_MRDS.A_PROCESS_LOG
     WHERE PROCESS_NAME = 'MARS-956'
       AND LOG_LEVEL = 'ERROR'
       AND LOG_TIMESTAMP >= SYSTIMESTAMP - INTERVAL '1' HOUR;

    -- Determine overall status: 9 files expected (3 tables x 3 ETL keys).
    IF vFileRegCount >= 9 AND vLogErrorCount = 0 THEN
        vOverallStatus := 'SUCCESS';
    ELSIF vFileRegCount > 0 AND vLogErrorCount = 0 THEN
        vOverallStatus := 'PARTIAL SUCCESS';
    ELSE
        vOverallStatus := 'ISSUES DETECTED';
    END IF;

    DBMS_OUTPUT.PUT_LINE('MARS-956 Export Verification: ' || vOverallStatus);
    DBMS_OUTPUT.PUT_LINE('- Registered files: ' || vFileRegCount || ' (expected: 9)');
    DBMS_OUTPUT.PUT_LINE('- Process errors: ' || vLogErrorCount);

    IF vOverallStatus = 'SUCCESS' THEN
        DBMS_OUTPUT.PUT_LINE('SUCCESS: All validations passed - export successful');
    ELSE
        DBMS_OUTPUT.PUT_LINE('WARNING: Some issues detected - review logs');
    END IF;
END;
/
|
||||
|
||||
PROMPT =========================================================================
|
||||
PROMPT Export Verification Completed
|
||||
PROMPT =========================================================================
|
||||
@@ -0,0 +1,354 @@
|
||||
-- ===================================================================
|
||||
-- MARS-956 Verify Data Integrity: Source vs Exported Data Validation
|
||||
-- ===================================================================
|
||||
-- Purpose: Verify data integrity between source tables and exported files
|
||||
-- Author: Grzegorz Michalski
|
||||
-- Date: 2026-02-12
|
||||
|
||||
SET SERVEROUTPUT ON SIZE UNLIMITED
|
||||
SET TIMING ON
|
||||
|
||||
PROMPT =========================================================================
|
||||
PROMPT MARS-956 Data Integrity Verification
|
||||
PROMPT =========================================================================
|
||||
|
||||
-- Check 1: Source table record counts vs expected ETL keys
PROMPT Checking source table record counts...
DECLARE
    vAdminRows     NUMBER := 0;  -- rows in MPEC_ADMIN
    vContentRows   NUMBER := 0;  -- rows in MPEC_CONTENT
    vCriterionRows NUMBER := 0;  -- rows in MPEC_CONTENT_CRITERION
    vTotalRows     NUMBER := 0;  -- grand total across all three
    -- FIX: removed unused local vExpectedFiles (declared but never read).
BEGIN
    -- Dynamic SQL keeps the block compilable even if OU_LEGACY_C2D is not
    -- visible at parse time.
    EXECUTE IMMEDIATE 'SELECT COUNT(*) FROM OU_LEGACY_C2D.MPEC_ADMIN' INTO vAdminRows;
    EXECUTE IMMEDIATE 'SELECT COUNT(*) FROM OU_LEGACY_C2D.MPEC_CONTENT' INTO vContentRows;
    EXECUTE IMMEDIATE 'SELECT COUNT(*) FROM OU_LEGACY_C2D.MPEC_CONTENT_CRITERION' INTO vCriterionRows;

    vTotalRows := vAdminRows + vContentRows + vCriterionRows;

    DBMS_OUTPUT.PUT_LINE('Source table record counts:');
    DBMS_OUTPUT.PUT_LINE('- MPEC_ADMIN: ' || vAdminRows || ' records');
    DBMS_OUTPUT.PUT_LINE('- MPEC_CONTENT: ' || vContentRows || ' records');
    DBMS_OUTPUT.PUT_LINE('- MPEC_CONTENT_CRITERION: ' || vCriterionRows || ' records');
    DBMS_OUTPUT.PUT_LINE('- TOTAL: ' || vTotalRows || ' records');

    IF vTotalRows > 0 THEN
        DBMS_OUTPUT.PUT_LINE('SUCCESS: All source tables contain data');
    ELSE
        DBMS_OUTPUT.PUT_LINE('ERROR: No data found in source tables');
    END IF;
END;
/
|
||||
|
||||
-- Check 2: ETL key distribution analysis
PROMPT Checking ETL key distribution...
DECLARE
    vAdminKeys     NUMBER := 0;  -- distinct ETL keys in MPEC_ADMIN
    vContentKeys   NUMBER := 0;  -- distinct ETL keys in MPEC_CONTENT
    vCriterionKeys NUMBER := 0;  -- distinct ETL keys in MPEC_CONTENT_CRITERION
    vTotalKeys     NUMBER := 0;  -- distinct ETL keys across all three tables
BEGIN
    EXECUTE IMMEDIATE 'SELECT COUNT(DISTINCT A_ETL_LOAD_SET_FK) FROM OU_LEGACY_C2D.MPEC_ADMIN' INTO vAdminKeys;
    EXECUTE IMMEDIATE 'SELECT COUNT(DISTINCT A_ETL_LOAD_SET_FK) FROM OU_LEGACY_C2D.MPEC_CONTENT' INTO vContentKeys;
    EXECUTE IMMEDIATE 'SELECT COUNT(DISTINCT A_ETL_LOAD_SET_FK) FROM OU_LEGACY_C2D.MPEC_CONTENT_CRITERION' INTO vCriterionKeys;

    -- CONSISTENCY FIX: the per-table counts above use dynamic SQL so this block
    -- compiles even when OU_LEGACY_C2D is not accessible at parse time, but the
    -- combined count was written as static SQL, re-introducing exactly that
    -- parse-time dependency. Made it dynamic to match.
    EXECUTE IMMEDIATE
        'SELECT COUNT(DISTINCT etl_key) FROM (' ||
        'SELECT A_ETL_LOAD_SET_FK AS etl_key FROM OU_LEGACY_C2D.MPEC_ADMIN UNION ' ||
        'SELECT A_ETL_LOAD_SET_FK FROM OU_LEGACY_C2D.MPEC_CONTENT UNION ' ||
        'SELECT A_ETL_LOAD_SET_FK FROM OU_LEGACY_C2D.MPEC_CONTENT_CRITERION)'
        INTO vTotalKeys;

    DBMS_OUTPUT.PUT_LINE('ETL key distribution:');
    DBMS_OUTPUT.PUT_LINE('- MPEC_ADMIN distinct keys: ' || vAdminKeys);
    DBMS_OUTPUT.PUT_LINE('- MPEC_CONTENT distinct keys: ' || vContentKeys);
    DBMS_OUTPUT.PUT_LINE('- MPEC_CONTENT_CRITERION distinct keys: ' || vCriterionKeys);
    DBMS_OUTPUT.PUT_LINE('- Total distinct ETL keys: ' || vTotalKeys);

    IF vTotalKeys > 0 THEN
        DBMS_OUTPUT.PUT_LINE('SUCCESS: ETL key distribution looks normal');
    ELSE
        DBMS_OUTPUT.PUT_LINE('ERROR: No ETL keys found in source data');
    END IF;
END;
/
|
||||
|
||||
-- Check 3: Template table compatibility verification
PROMPT Checking template table compatibility...
DECLARE
    vAdminCols     NUMBER := 0;  -- columns in C2D_MPEC_ADMIN template
    vContentCols   NUMBER := 0;  -- columns in C2D_MPEC_CONTENT template
    vCriterionCols NUMBER := 0;  -- columns in C2D_MPEC_CONTENT_CRITERION template
BEGIN
    -- IMPROVEMENT: one scan of ALL_TAB_COLUMNS with conditional aggregation
    -- replaces the original three near-identical queries. NVL guards the
    -- no-matching-rows case (SUM over an empty/filtered set yields NULL).
    SELECT NVL(SUM(CASE WHEN table_name = 'C2D_MPEC_ADMIN'             THEN 1 END), 0),
           NVL(SUM(CASE WHEN table_name = 'C2D_MPEC_CONTENT'           THEN 1 END), 0),
           NVL(SUM(CASE WHEN table_name = 'C2D_MPEC_CONTENT_CRITERION' THEN 1 END), 0)
      INTO vAdminCols, vContentCols, vCriterionCols
      FROM all_tab_columns
     WHERE owner = 'CT_ET_TEMPLATES'
       AND table_name IN ('C2D_MPEC_ADMIN', 'C2D_MPEC_CONTENT', 'C2D_MPEC_CONTENT_CRITERION');

    DBMS_OUTPUT.PUT_LINE('Template table column counts:');
    DBMS_OUTPUT.PUT_LINE('- C2D_MPEC_ADMIN: ' || vAdminCols || ' columns');
    DBMS_OUTPUT.PUT_LINE('- C2D_MPEC_CONTENT: ' || vContentCols || ' columns');
    DBMS_OUTPUT.PUT_LINE('- C2D_MPEC_CONTENT_CRITERION: ' || vCriterionCols || ' columns');

    IF vAdminCols > 0 AND vContentCols > 0 AND vCriterionCols > 0 THEN
        DBMS_OUTPUT.PUT_LINE('SUCCESS: All template tables have defined structure');
    ELSE
        DBMS_OUTPUT.PUT_LINE('ERROR: One or more template tables missing columns');
    END IF;
END;
/
|
||||
|
||||
-- Check 4: Verify A_ETL_LOAD_SET_FK values exist in A_LOAD_HISTORY
PROMPT Checking ETL key references in A_LOAD_HISTORY...
DECLARE
    vValidKeys       NUMBER := 0;  -- source keys that exist in A_LOAD_HISTORY
    vTotalSourceKeys NUMBER := 0;  -- distinct ETL keys across the three source tables
BEGIN
    -- IMPROVEMENT: the original executed the three-table UNION twice (once for
    -- the total, once filtered by EXISTS). A single pass with an outer join to
    -- the distinct A_LOAD_HISTORY keys yields both counts.
    -- COUNT(src.etl_key) excludes NULLs, matching the original COUNT(DISTINCT)
    -- semantics; UNION already de-duplicates the source keys.
    SELECT COUNT(src.etl_key),
           COUNT(h.A_ETL_LOAD_SET_KEY)
      INTO vTotalSourceKeys, vValidKeys
      FROM (
               SELECT A_ETL_LOAD_SET_FK AS etl_key FROM OU_LEGACY_C2D.MPEC_ADMIN
               UNION
               SELECT A_ETL_LOAD_SET_FK FROM OU_LEGACY_C2D.MPEC_CONTENT
               UNION
               SELECT A_ETL_LOAD_SET_FK FROM OU_LEGACY_C2D.MPEC_CONTENT_CRITERION
           ) src
      LEFT JOIN (SELECT DISTINCT A_ETL_LOAD_SET_KEY FROM CT_ODS.A_LOAD_HISTORY) h
        ON h.A_ETL_LOAD_SET_KEY = src.etl_key;

    DBMS_OUTPUT.PUT_LINE('ETL key validation:');
    DBMS_OUTPUT.PUT_LINE('- Total distinct ETL keys in source: ' || vTotalSourceKeys);
    DBMS_OUTPUT.PUT_LINE('- Valid keys (exist in A_LOAD_HISTORY): ' || vValidKeys);

    IF vValidKeys = vTotalSourceKeys THEN
        DBMS_OUTPUT.PUT_LINE('SUCCESS: All source ETL keys are valid');
    ELSE
        DBMS_OUTPUT.PUT_LINE('ERROR: Some ETL keys may be invalid: ' || (vTotalSourceKeys - vValidKeys));
    END IF;
END;
/
|
||||
|
||||
PROMPT =====================================================================================
|
||||
PROMPT MARS-956 Record Count Verification
|
||||
PROMPT =====================================================================================
|
||||
PROMPT Comparing source table counts with exported external table counts
|
||||
PROMPT =====================================================================================
|
||||
|
||||
-- Compare row counts between each legacy source table and its exported
-- external table, then summarize against the registered export files.
DECLARE
    -- Describes one source table and the external table its export feeds.
    TYPE t_table_info IS RECORD (
        source_schema  VARCHAR2(50),
        source_table   VARCHAR2(100),
        external_table VARCHAR2(100),
        description    VARCHAR2(200)
    );
    TYPE t_table_list IS TABLE OF t_table_info;

    vTables           t_table_list;
    vSourceCount      NUMBER;       -- rows in the current source table
    vTargetCount      NUMBER;       -- rows in the matching external table (-1 = real error)
    vTotalSourceCount NUMBER := 0;
    vTotalTargetCount NUMBER := 0;
    vMismatchCount    NUMBER := 0;  -- tables whose counts differ or errored
    vSql              VARCHAR2(4000);
    vFileCount        NUMBER := 0;  -- export files registered in the last hour
    -- FIX: removed unused local vValidationResult (declared but never referenced).
BEGIN
    DBMS_OUTPUT.PUT_LINE('VERIFICATION TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS'));
    DBMS_OUTPUT.PUT_LINE('');

    -- Initialize table list with C2D MPEC configuration
    vTables := t_table_list(
        t_table_info('OU_LEGACY_C2D', 'MPEC_ADMIN', 'ODS.C2D_MPEC_ADMIN_ODS', 'MPEC Admin data (ETL keys 2001-2005)'),
        t_table_info('OU_LEGACY_C2D', 'MPEC_CONTENT', 'ODS.C2D_MPEC_CONTENT_ODS', 'MPEC Content data (ETL keys 2006-2008)'),
        t_table_info('OU_LEGACY_C2D', 'MPEC_CONTENT_CRITERION', 'ODS.C2D_MPEC_CONTENT_CRITERION_ODS', 'MPEC Criterion data (ETL keys 2009-2010)')
    );

    DBMS_OUTPUT.PUT_LINE('-----------------------------------------------------------------------------------------');
    DBMS_OUTPUT.PUT_LINE('Table Name Source Count Target Count Status');
    DBMS_OUTPUT.PUT_LINE('-----------------------------------------------------------------------------------------');

    FOR i IN 1..vTables.COUNT LOOP
        -- Source row count (dynamic SQL avoids a parse-time dependency on the schema).
        vSql := 'SELECT COUNT(*) FROM ' || vTables(i).source_schema || '.' || vTables(i).source_table;
        BEGIN
            EXECUTE IMMEDIATE vSql INTO vSourceCount;
            vTotalSourceCount := vTotalSourceCount + vSourceCount;
        EXCEPTION
            WHEN OTHERS THEN
                DBMS_OUTPUT.PUT_LINE(RPAD(vTables(i).source_table, 24) || 'ERROR: Cannot access source table');
                -- FIX: CONTINUE makes the later "source errored" status branches
                -- unreachable, so the vSourceCount := -1 sentinel and its
                -- display/status handling were removed as dead code.
                CONTINUE;
        END;

        -- Target external table count.
        vSql := 'SELECT COUNT(*) FROM ' || vTables(i).external_table;
        BEGIN
            EXECUTE IMMEDIATE vSql INTO vTargetCount;
            vTotalTargetCount := vTotalTargetCount + vTargetCount;
        EXCEPTION
            WHEN OTHERS THEN
                -- Expected errors for empty external tables:
                --   ORA-29913: error in executing ODCIEXTTABLEOPEN callout
                --   ORA-29400: data cartridge error
                --   KUP-13023: nothing matched wildcard query (no files in bucket)
                -- NOTE: ORA-30653 (reject limit) is a real data quality error, not treated as empty.
                IF vSourceCount = 0 OR SQLCODE IN (-29913, -29400) OR SQLERRM LIKE '%KUP-13023%' THEN
                    vTargetCount := 0; -- treat as empty (no files exported yet)
                ELSE
                    vTargetCount := -1; -- real error
                END IF;
        END;

        -- Display comparison results with thousands separators.
        DECLARE
            vStatus        VARCHAR2(20);
            vSourceDisplay VARCHAR2(17);
            vTargetDisplay VARCHAR2(17);
        BEGIN
            vSourceDisplay := TO_CHAR(vSourceCount, '9,999,999,999');

            IF vTargetCount = -1 THEN
                vTargetDisplay := 'ERROR';
            ELSE
                vTargetDisplay := TO_CHAR(vTargetCount, '9,999,999,999');
            END IF;

            IF vSourceCount = vTargetCount THEN
                vStatus := 'PASS';
            ELSIF vTargetCount = -1 THEN
                vStatus := 'ERROR';
                vMismatchCount := vMismatchCount + 1;
            ELSE
                vStatus := 'MISMATCH';
                vMismatchCount := vMismatchCount + 1;
            END IF;

            DBMS_OUTPUT.PUT_LINE(
                RPAD(vTables(i).source_table, 24) ||
                LPAD(vSourceDisplay, 15) ||
                LPAD(vTargetDisplay, 15) || ' ' ||
                vStatus
            );
        END;
    END LOOP;

    DBMS_OUTPUT.PUT_LINE('-----------------------------------------------------------------------------------------');
    DBMS_OUTPUT.PUT_LINE(
        RPAD('TOTALS', 24) ||
        LPAD(TO_CHAR(vTotalSourceCount, '9,999,999,999'), 15) ||
        LPAD(TO_CHAR(vTotalTargetCount, '9,999,999,999'), 15)
    );
    DBMS_OUTPUT.PUT_LINE('-----------------------------------------------------------------------------------------');
    DBMS_OUTPUT.PUT_LINE('');

    -- Count exported files for additional verification.
    -- NOTE(review): in LIKE, '_' matches any single character, so '200_%' also
    -- matches e.g. '200X...' — confirm whether a literal underscore (ESCAPE
    -- clause) was intended here, as in the other verification scripts.
    SELECT COUNT(*)
      INTO vFileCount
      FROM CT_MRDS.A_SOURCE_FILE_RECEIVED
     WHERE RECEPTION_DATE >= SYSDATE - 1/24
       AND SOURCE_FILE_NAME LIKE '200_%';

    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    DBMS_OUTPUT.PUT_LINE('Record Count Verification Summary');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    DBMS_OUTPUT.PUT_LINE('Total source records: ' || TO_CHAR(vTotalSourceCount, '9,999,999,999'));
    DBMS_OUTPUT.PUT_LINE('Total target records: ' || TO_CHAR(vTotalTargetCount, '9,999,999,999') || ' (exported to ODS)');
    DBMS_OUTPUT.PUT_LINE('Export files registered: ' || vFileCount);
    DBMS_OUTPUT.PUT_LINE('');

    IF vMismatchCount = 0 AND vFileCount > 0 THEN
        DBMS_OUTPUT.PUT_LINE('[PASS] VERIFICATION PASSED');
        DBMS_OUTPUT.PUT_LINE(' All record counts match between source and exported data');
        DBMS_OUTPUT.PUT_LINE(' Export completed successfully');
    ELSIF vMismatchCount > 0 THEN
        DBMS_OUTPUT.PUT_LINE('[INFO] VERIFICATION COMPLETED WITH MISMATCHES');
        DBMS_OUTPUT.PUT_LINE(' Found ' || vMismatchCount || ' table(s) with count mismatches');
        DBMS_OUTPUT.PUT_LINE(' NOTE: Mismatches may be caused by pre-existing files in buckets (see pre-check)');
        DBMS_OUTPUT.PUT_LINE(' Review export logs and pre-check results before re-running exports');
    ELSE
        DBMS_OUTPUT.PUT_LINE('[WARN] NO EXPORT DETECTED');
        DBMS_OUTPUT.PUT_LINE(' No files found in export registration');
        DBMS_OUTPUT.PUT_LINE(' Verify export execution completed successfully');
    END IF;
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    DBMS_OUTPUT.PUT_LINE('');

    DBMS_OUTPUT.PUT_LINE('Legend:');
    DBMS_OUTPUT.PUT_LINE(' PASS - Record counts match (export successful)');
    DBMS_OUTPUT.PUT_LINE(' MISMATCH - Record counts differ (may be pre-existing files or export issue)');
    DBMS_OUTPUT.PUT_LINE(' Check pre-check results to identify pre-existing files');
    DBMS_OUTPUT.PUT_LINE(' ERROR - Cannot access table (verify table exists and permissions)');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');

    -- Additional ETL key analysis for C2D MPEC data.
    DBMS_OUTPUT.PUT_LINE('');
    DBMS_OUTPUT.PUT_LINE('ETL Key Analysis:');

    DECLARE
        vAdminKeys     NUMBER;
        vContentKeys   NUMBER;
        vCriterionKeys NUMBER;
    BEGIN
        EXECUTE IMMEDIATE 'SELECT COUNT(DISTINCT A_ETL_LOAD_SET_FK) FROM OU_LEGACY_C2D.MPEC_ADMIN' INTO vAdminKeys;
        EXECUTE IMMEDIATE 'SELECT COUNT(DISTINCT A_ETL_LOAD_SET_FK) FROM OU_LEGACY_C2D.MPEC_CONTENT' INTO vContentKeys;
        EXECUTE IMMEDIATE 'SELECT COUNT(DISTINCT A_ETL_LOAD_SET_FK) FROM OU_LEGACY_C2D.MPEC_CONTENT_CRITERION' INTO vCriterionKeys;

        DBMS_OUTPUT.PUT_LINE('- MPEC_ADMIN distinct ETL keys: ' || vAdminKeys || ' (expected: 3 for keys 2001-2005)');
        DBMS_OUTPUT.PUT_LINE('- MPEC_CONTENT distinct ETL keys: ' || vContentKeys || ' (expected: 3 for keys 2006-2008)');
        DBMS_OUTPUT.PUT_LINE('- MPEC_CONTENT_CRITERION distinct ETL keys: ' || vCriterionKeys || ' (expected: 2 for keys 2009-2010)');

        -- Expected file count = sum of distinct ETL keys per table.
        DBMS_OUTPUT.PUT_LINE('- Expected export files: ' || (vAdminKeys + vContentKeys + vCriterionKeys));
        DBMS_OUTPUT.PUT_LINE('- Actual export files: ' || vFileCount);
    END;
END;
/
|
||||
|
||||
PROMPT =========================================================================
|
||||
PROMPT Data Integrity Verification Completed
|
||||
PROMPT =========================================================================
|
||||
@@ -0,0 +1,167 @@
|
||||
--=============================================================================================================================
|
||||
-- MARS-956 ROLLBACK: Delete Exported CSV Files from DATA Bucket
|
||||
--=============================================================================================================================
|
||||
-- Purpose: Delete exported CSV files from ODS/C2D bucket folders for MPEC tables
|
||||
-- WARNING: This will permanently delete exported data files!
|
||||
-- Author: Grzegorz Michalski
|
||||
-- Date: 2026-02-12
|
||||
-- Related: MARS-956 - C2D MPEC Data Export Rollback
|
||||
--=============================================================================================================================
|
||||
|
||||
SET SERVEROUTPUT ON SIZE UNLIMITED
|
||||
|
||||
PROMPT ========================================================================
|
||||
PROMPT ROLLBACK: Deleting C2D_MPEC_ADMIN exported files
|
||||
PROMPT ========================================================================
|
||||
PROMPT WARNING: This will delete files from:
|
||||
PROMPT - DATA bucket: mrds_data_dev/ODS/C2D/C2D_MPEC_ADMIN/
|
||||
PROMPT ========================================================================
|
||||
|
||||
-- Delete every CSV that the MARS-956 export registered for C2D_MPEC_ADMIN.
DECLARE
    vBucketUri    VARCHAR2(500);  -- DATA bucket base URI
    vCredential   VARCHAR2(100);  -- DBMS_CLOUD credential name
    vDeletedCount NUMBER := 0;    -- objects actually removed
BEGIN
    vBucketUri  := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('DATA');
    vCredential := CT_MRDS.ENV_MANAGER.gvCredentialName;

    DBMS_OUTPUT.PUT_LINE('Deleting C2D_MPEC_ADMIN files from DATA bucket...');
    DBMS_OUTPUT.PUT_LINE(' Using A_SOURCE_FILE_RECEIVED with PROCESS_NAME = ''MARS-956''');

    FOR f IN (
        SELECT SOURCE_FILE_NAME AS object_name
          FROM CT_MRDS.A_SOURCE_FILE_RECEIVED
         WHERE PROCESS_NAME = 'MARS-956'
           AND SOURCE_FILE_NAME LIKE '%MPEC_ADMIN%'
    ) LOOP
        BEGIN
            DBMS_CLOUD.DELETE_OBJECT(
                credential_name => vCredential,
                object_uri      => vBucketUri || 'ODS/C2D/C2D_MPEC_ADMIN/' || f.object_name
            );
            DBMS_OUTPUT.PUT_LINE(' Deleted: ' || f.object_name);
            vDeletedCount := vDeletedCount + 1;
        EXCEPTION
            WHEN OTHERS THEN
                -- ORA-20404: object already absent from the bucket — skip it.
                IF SQLCODE <> -20404 THEN
                    RAISE;
                END IF;
                DBMS_OUTPUT.PUT_LINE(' Skipped (not found): ' || f.object_name);
        END;
    END LOOP;

    IF vDeletedCount = 0 THEN
        DBMS_OUTPUT.PUT_LINE(' INFO: No files found to delete');
    END IF;

    DBMS_OUTPUT.PUT_LINE('SUCCESS: C2D_MPEC_ADMIN files deleted (' || vDeletedCount || ' file(s))');
END;
/
|
||||
|
||||
PROMPT ========================================================================
|
||||
PROMPT ROLLBACK: Deleting C2D_MPEC_CONTENT exported files
|
||||
PROMPT ========================================================================
|
||||
PROMPT WARNING: This will delete files from:
|
||||
PROMPT - DATA bucket: mrds_data_dev/ODS/C2D/C2D_MPEC_CONTENT/
|
||||
PROMPT ========================================================================
|
||||
|
||||
-- Delete every CSV that the MARS-956 export registered for C2D_MPEC_CONTENT
-- (excluding the CRITERION table, whose names also contain 'MPEC_CONTENT').
DECLARE
    vBucketUri    VARCHAR2(500);  -- DATA bucket base URI
    vCredential   VARCHAR2(100);  -- DBMS_CLOUD credential name
    vDeletedCount NUMBER := 0;    -- objects actually removed
BEGIN
    vBucketUri  := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('DATA');
    vCredential := CT_MRDS.ENV_MANAGER.gvCredentialName;

    DBMS_OUTPUT.PUT_LINE('Deleting C2D_MPEC_CONTENT files from DATA bucket...');
    DBMS_OUTPUT.PUT_LINE(' Using A_SOURCE_FILE_RECEIVED with PROCESS_NAME = ''MARS-956''');

    FOR f IN (
        SELECT SOURCE_FILE_NAME AS object_name
          FROM CT_MRDS.A_SOURCE_FILE_RECEIVED
         WHERE PROCESS_NAME = 'MARS-956'
           AND SOURCE_FILE_NAME LIKE '%MPEC_CONTENT%'
           AND SOURCE_FILE_NAME NOT LIKE '%CRITERION%'
    ) LOOP
        BEGIN
            DBMS_CLOUD.DELETE_OBJECT(
                credential_name => vCredential,
                object_uri      => vBucketUri || 'ODS/C2D/C2D_MPEC_CONTENT/' || f.object_name
            );
            DBMS_OUTPUT.PUT_LINE(' Deleted: ' || f.object_name);
            vDeletedCount := vDeletedCount + 1;
        EXCEPTION
            WHEN OTHERS THEN
                -- ORA-20404: object already absent from the bucket — skip it.
                IF SQLCODE <> -20404 THEN
                    RAISE;
                END IF;
                DBMS_OUTPUT.PUT_LINE(' Skipped (not found): ' || f.object_name);
        END;
    END LOOP;

    IF vDeletedCount = 0 THEN
        DBMS_OUTPUT.PUT_LINE(' INFO: No files found to delete');
    END IF;

    DBMS_OUTPUT.PUT_LINE('SUCCESS: C2D_MPEC_CONTENT files deleted (' || vDeletedCount || ' file(s))');
END;
/
|
||||
|
||||
PROMPT ========================================================================
|
||||
PROMPT ROLLBACK: Deleting C2D_MPEC_CONTENT_CRITERION exported files
|
||||
PROMPT ========================================================================
|
||||
PROMPT WARNING: This will delete files from:
|
||||
PROMPT - DATA bucket: mrds_data_dev/ODS/C2D/C2D_MPEC_CONTENT_CRITERION/
|
||||
PROMPT ========================================================================
|
||||
|
||||
-- Delete every CSV that the MARS-956 export registered for C2D_MPEC_CONTENT_CRITERION.
DECLARE
    vBucketUri    VARCHAR2(500);  -- DATA bucket base URI
    vCredential   VARCHAR2(100);  -- DBMS_CLOUD credential name
    vDeletedCount NUMBER := 0;    -- objects actually removed
BEGIN
    vBucketUri  := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('DATA');
    vCredential := CT_MRDS.ENV_MANAGER.gvCredentialName;

    DBMS_OUTPUT.PUT_LINE('Deleting C2D_MPEC_CONTENT_CRITERION files from DATA bucket...');
    DBMS_OUTPUT.PUT_LINE(' Using A_SOURCE_FILE_RECEIVED with PROCESS_NAME = ''MARS-956''');

    FOR f IN (
        SELECT SOURCE_FILE_NAME AS object_name
          FROM CT_MRDS.A_SOURCE_FILE_RECEIVED
         WHERE PROCESS_NAME = 'MARS-956'
           AND SOURCE_FILE_NAME LIKE '%MPEC_CONTENT_CRITERION%'
    ) LOOP
        BEGIN
            DBMS_CLOUD.DELETE_OBJECT(
                credential_name => vCredential,
                object_uri      => vBucketUri || 'ODS/C2D/C2D_MPEC_CONTENT_CRITERION/' || f.object_name
            );
            DBMS_OUTPUT.PUT_LINE(' Deleted: ' || f.object_name);
            vDeletedCount := vDeletedCount + 1;
        EXCEPTION
            WHEN OTHERS THEN
                -- ORA-20404: object already absent from the bucket — skip it.
                IF SQLCODE <> -20404 THEN
                    RAISE;
                END IF;
                DBMS_OUTPUT.PUT_LINE(' Skipped (not found): ' || f.object_name);
        END;
    END LOOP;

    IF vDeletedCount = 0 THEN
        DBMS_OUTPUT.PUT_LINE(' INFO: No files found to delete');
    END IF;

    DBMS_OUTPUT.PUT_LINE('SUCCESS: C2D_MPEC_CONTENT_CRITERION files deleted (' || vDeletedCount || ' file(s))');
END;
/
|
||||
|
||||
PROMPT SUCCESS: All CSV file deletion operations completed
|
||||
@@ -0,0 +1,78 @@
|
||||
-- ===================================================================
|
||||
-- MARS-956 Rollback Step 1: Delete File Registrations
|
||||
-- ===================================================================
|
||||
-- Purpose: Remove MARS-956 export file registrations from A_SOURCE_FILE_RECEIVED
|
||||
-- Author: Grzegorz Michalski
|
||||
-- Date: 2026-02-12
|
||||
|
||||
SET SERVEROUTPUT ON SIZE UNLIMITED
|
||||
SET TIMING ON
|
||||
|
||||
PROMPT =========================================================================
|
||||
PROMPT MARS-956 Rollback Step 1: Delete File Registrations
|
||||
PROMPT =========================================================================
|
||||
|
||||
-- Remove all MARS-956 export registrations from A_SOURCE_FILE_RECEIVED,
-- listing them first and logging the action (or any failure) to A_PROCESS_LOG.
DECLARE
    vFileCount    NUMBER := 0;     -- registrations matching the MARS-956 process
    vDeletedCount NUMBER := 0;     -- rows actually deleted
    vErrorMsg     VARCHAR2(4000);  -- failure message persisted to the log
BEGIN
    -- Count files to be deleted (using PROCESS_NAME)
    SELECT COUNT(*)
      INTO vFileCount
      FROM CT_MRDS.A_SOURCE_FILE_RECEIVED
     WHERE PROCESS_NAME = 'MARS-956';

    DBMS_OUTPUT.PUT_LINE('Files to be deleted: ' || vFileCount);
    DBMS_OUTPUT.PUT_LINE('Using PROCESS_NAME = ''MARS-956'' filter');

    IF vFileCount > 0 THEN
        -- Show files before deletion.
        DBMS_OUTPUT.PUT_LINE('Files being removed:');
        FOR rec IN (
            -- FIX: dropped the RECEIVED_TIME column from the cursor — it was
            -- selected (with a TO_CHAR conversion) but never referenced in the
            -- loop body.
            SELECT A_SOURCE_FILE_RECEIVED_KEY,
                   SUBSTR(SOURCE_FILE_NAME, 1, 60) AS FILE_NAME,
                   PROCESS_NAME
              FROM CT_MRDS.A_SOURCE_FILE_RECEIVED
             WHERE PROCESS_NAME = 'MARS-956'
             ORDER BY RECEPTION_DATE DESC
        ) LOOP
            DBMS_OUTPUT.PUT_LINE('- ' || rec.FILE_NAME || ' (ID: ' || rec.A_SOURCE_FILE_RECEIVED_KEY || ', Process: ' || rec.PROCESS_NAME || ')');
        END LOOP;

        -- Delete the file registrations using PROCESS_NAME.
        DELETE FROM CT_MRDS.A_SOURCE_FILE_RECEIVED
         WHERE PROCESS_NAME = 'MARS-956';

        vDeletedCount := SQL%ROWCOUNT;
        COMMIT;

        DBMS_OUTPUT.PUT_LINE('SUCCESS: Successfully deleted ' || vDeletedCount || ' file registrations');

        -- Log the rollback action.
        INSERT INTO CT_MRDS.A_PROCESS_LOG (PROCESS_NAME, PROCEDURE_NAME, LOG_LEVEL, LOG_MESSAGE)
        VALUES ('MARS-956-ROLLBACK', 'DELETE_FILE_REGISTRATIONS', 'INFO',
                'Deleted ' || vDeletedCount || ' file registrations');
        COMMIT;

    ELSE
        DBMS_OUTPUT.PUT_LINE('SUCCESS: No file registrations found to delete');
    END IF;

EXCEPTION
    WHEN OTHERS THEN
        ROLLBACK;
        vErrorMsg := 'Failed to delete file registrations: ' || SQLERRM;
        DBMS_OUTPUT.PUT_LINE('ERROR: Error during file registration deletion: ' || SQLERRM);
        -- Persist the failure before re-raising so the rollback attempt is auditable.
        INSERT INTO CT_MRDS.A_PROCESS_LOG (PROCESS_NAME, PROCEDURE_NAME, LOG_LEVEL, LOG_MESSAGE)
        VALUES ('MARS-956-ROLLBACK', 'DELETE_FILE_REGISTRATIONS', 'ERROR', vErrorMsg);
        COMMIT;
        RAISE;
END;
/
|
||||
|
||||
PROMPT =========================================================================
|
||||
PROMPT File Registration Rollback Completed
|
||||
PROMPT =========================================================================
|
||||
@@ -0,0 +1,77 @@
|
||||
-- ===================================================================
|
||||
-- MARS-956 Rollback Step 2: Clean Process Logs
|
||||
-- ===================================================================
|
||||
-- Purpose: Remove MARS-956 process logs from A_PROCESS_LOG
|
||||
-- Author: Grzegorz Michalski
|
||||
-- Date: 2026-02-12
|
||||
|
||||
SET SERVEROUTPUT ON SIZE UNLIMITED
|
||||
SET TIMING ON
|
||||
|
||||
PROMPT =========================================================================
|
||||
PROMPT MARS-956 Rollback Step 2: Clean Process Logs
|
||||
PROMPT =========================================================================
|
||||
|
||||
DECLARE
|
||||
vLogCount NUMBER := 0;
|
||||
vDeletedCount NUMBER := 0;
|
||||
vErrorMsg VARCHAR2(4000);
|
||||
BEGIN
|
||||
-- Count logs to be deleted
|
||||
SELECT COUNT(*)
|
||||
INTO vLogCount
|
||||
FROM CT_MRDS.A_PROCESS_LOG
|
||||
WHERE PROCESS_NAME IN ('MARS-956', 'MARS-956-ROLLBACK')
|
||||
AND LOG_TIMESTAMP >= SYSDATE - 7; -- Last week (safety)
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('Process log entries to be deleted: ' || vLogCount);
|
||||
|
||||
IF vLogCount > 0 THEN
|
||||
-- Show recent logs before deletion
|
||||
DBMS_OUTPUT.PUT_LINE('Recent MARS-956 log entries being removed:');
|
||||
FOR rec IN (
|
||||
SELECT A_PROCESS_LOG_KEY,
|
||||
TO_CHAR(LOG_TIMESTAMP, 'YYYY-MM-DD HH24:MI:SS') AS LOG_TIME,
|
||||
PROCEDURE_NAME,
|
||||
LOG_LEVEL,
|
||||
SUBSTR(LOG_MESSAGE, 1, 40) AS MESSAGE
|
||||
FROM CT_MRDS.A_PROCESS_LOG
|
||||
WHERE PROCESS_NAME IN ('MARS-956', 'MARS-956-ROLLBACK')
|
||||
AND LOG_TIMESTAMP >= SYSDATE - 7
|
||||
ORDER BY LOG_TIMESTAMP DESC
|
||||
FETCH FIRST 10 ROWS ONLY
|
||||
) LOOP
|
||||
DBMS_OUTPUT.PUT_LINE('- ' || rec.LOG_TIME || ' [' || rec.LOG_LEVEL || '] ' ||
|
||||
rec.PROCEDURE_NAME || ': ' || rec.MESSAGE);
|
||||
END LOOP;
|
||||
|
||||
-- Delete the process logs
|
||||
DELETE FROM CT_MRDS.A_PROCESS_LOG
|
||||
WHERE PROCESS_NAME IN ('MARS-956', 'MARS-956-ROLLBACK')
|
||||
AND LOG_TIMESTAMP >= SYSDATE - 7;
|
||||
|
||||
vDeletedCount := SQL%ROWCOUNT;
|
||||
COMMIT;
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: Successfully deleted ' || vDeletedCount || ' process log entries');
|
||||
|
||||
ELSE
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: No process log entries found to delete');
|
||||
END IF;
|
||||
|
||||
EXCEPTION
|
||||
WHEN OTHERS THEN
|
||||
ROLLBACK;
|
||||
vErrorMsg := 'Failed to clean process logs: ' || SQLERRM;
|
||||
DBMS_OUTPUT.PUT_LINE('ERROR: Error during process log cleanup: ' || SQLERRM);
|
||||
-- Log the error (will remain after rollback for debugging)
|
||||
INSERT INTO CT_MRDS.A_PROCESS_LOG (PROCESS_NAME, PROCEDURE_NAME, LOG_LEVEL, LOG_MESSAGE)
|
||||
VALUES ('MARS-956-ROLLBACK', 'CLEANUP_PROCESS_LOGS', 'ERROR', vErrorMsg);
|
||||
COMMIT;
|
||||
RAISE;
|
||||
END;
|
||||
/
|
||||
|
||||
PROMPT =========================================================================
|
||||
PROMPT Process Log Cleanup Completed
|
||||
PROMPT =========================================================================
|
||||
@@ -0,0 +1,207 @@
|
||||
-- ===================================================================
|
||||
-- MARS-956 Rollback Verification: Confirm Rollback Completion
|
||||
-- ===================================================================
|
||||
-- Purpose: Verify that MARS-956 rollback completed successfully
|
||||
-- Author: Grzegorz Michalski
|
||||
-- Date: 2026-02-12
|
||||
|
||||
SET SERVEROUTPUT ON SIZE UNLIMITED
|
||||
SET TIMING ON
|
||||
|
||||
PROMPT =========================================================================
|
||||
PROMPT MARS-956 Rollback Verification
|
||||
PROMPT =========================================================================
|
||||
|
||||
-- Check 1: Verify file registrations were removed
|
||||
PROMPT Checking file registration cleanup...
|
||||
DECLARE
|
||||
vRemainingFiles NUMBER := 0;
|
||||
BEGIN
|
||||
SELECT COUNT(*)
|
||||
INTO vRemainingFiles
|
||||
FROM CT_MRDS.A_SOURCE_FILE_RECEIVED
|
||||
WHERE SOURCE_FILE_NAME LIKE '200%' -- ETL keys 2001-2010
|
||||
AND RECEPTION_DATE >= SYSDATE - 7; -- Last week
|
||||
|
||||
IF vRemainingFiles = 0 THEN
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: All MARS-956 file registrations successfully removed');
|
||||
ELSE
|
||||
DBMS_OUTPUT.PUT_LINE('WARNING: ' || vRemainingFiles || ' file registrations still exist');
|
||||
|
||||
-- Show remaining files
|
||||
FOR rec IN (
|
||||
SELECT SUBSTR(SOURCE_FILE_NAME, 1, 50) AS FILE_NAME,
|
||||
TO_CHAR(RECEPTION_DATE, 'YYYY-MM-DD HH24:MI:SS') AS RECEIVED_TIME
|
||||
FROM CT_MRDS.A_SOURCE_FILE_RECEIVED
|
||||
WHERE SOURCE_FILE_NAME LIKE '200%'
|
||||
AND RECEPTION_DATE >= SYSDATE - 7
|
||||
) LOOP
|
||||
DBMS_OUTPUT.PUT_LINE(' Remaining: ' || rec.FILE_NAME);
|
||||
END LOOP;
|
||||
END IF;
|
||||
END;
|
||||
/
|
||||
|
||||
-- Check 2: Verify process logs were cleaned
|
||||
PROMPT Checking process log cleanup...
|
||||
DECLARE
|
||||
vRemainingLogs NUMBER := 0;
|
||||
BEGIN
|
||||
SELECT COUNT(*)
|
||||
INTO vRemainingLogs
|
||||
FROM CT_MRDS.A_PROCESS_LOG
|
||||
WHERE PROCESS_NAME = 'MARS-956'
|
||||
AND LOG_TIMESTAMP >= SYSDATE - 7; -- Last week
|
||||
|
||||
IF vRemainingLogs = 0 THEN
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: All MARS-956 process logs successfully removed');
|
||||
ELSE
|
||||
DBMS_OUTPUT.PUT_LINE('WARNING: ' || vRemainingLogs || ' process log entries still exist');
|
||||
|
||||
-- Show remaining logs (first few)
|
||||
FOR rec IN (
|
||||
SELECT TO_CHAR(LOG_TIMESTAMP, 'YYYY-MM-DD HH24:MI:SS') AS LOG_TIME,
|
||||
PROCEDURE_NAME,
|
||||
SUBSTR(LOG_MESSAGE, 1, 40) AS MESSAGE
|
||||
FROM CT_MRDS.A_PROCESS_LOG
|
||||
WHERE PROCESS_NAME = 'MARS-956'
|
||||
AND LOG_TIMESTAMP >= SYSDATE - 7
|
||||
ORDER BY LOG_TIMESTAMP DESC
|
||||
FETCH FIRST 3 ROWS ONLY
|
||||
) LOOP
|
||||
DBMS_OUTPUT.PUT_LINE(' Remaining: ' || rec.LOG_TIME || ' ' || rec.PROCEDURE_NAME);
|
||||
END LOOP;
|
||||
END IF;
|
||||
END;
|
||||
/
|
||||
|
||||
-- Check 3: Verify cloud bucket cleanup (informational only)
|
||||
PROMPT Checking cloud bucket status...
|
||||
DECLARE
|
||||
vCloudFileCount NUMBER := 0;
|
||||
vCredentialName VARCHAR2(100);
|
||||
vDataBucketUri VARCHAR2(500);
|
||||
BEGIN
|
||||
-- Get bucket URI and credential
|
||||
vDataBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ODS');
|
||||
vCredentialName := CT_MRDS.ENV_MANAGER.gvCredentialName;
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('Checking ODS bucket: ' || vDataBucketUri);
|
||||
|
||||
-- Count remaining files in cloud bucket
|
||||
BEGIN
|
||||
FOR rec IN (
|
||||
SELECT object_name
|
||||
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
||||
credential_name => vCredentialName,
|
||||
location_uri => vDataBucketUri
|
||||
))
|
||||
WHERE object_name LIKE 'ODS/C2D/C2D_MPEC_%'
|
||||
) LOOP
|
||||
vCloudFileCount := vCloudFileCount + 1;
|
||||
IF vCloudFileCount <= 3 THEN -- Show first 3 files
|
||||
DBMS_OUTPUT.PUT_LINE(' Cloud file: ' || rec.object_name);
|
||||
END IF;
|
||||
END LOOP;
|
||||
|
||||
IF vCloudFileCount = 0 THEN
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: No C2D MPEC files found in cloud bucket');
|
||||
ELSE
|
||||
DBMS_OUTPUT.PUT_LINE('INFO: ' || vCloudFileCount || ' C2D MPEC files still in cloud bucket');
|
||||
DBMS_OUTPUT.PUT_LINE(' Note: Cloud files are not automatically deleted by rollback');
|
||||
DBMS_OUTPUT.PUT_LINE(' Manual deletion required if needed');
|
||||
END IF;
|
||||
|
||||
EXCEPTION
|
||||
WHEN OTHERS THEN
|
||||
DBMS_OUTPUT.PUT_LINE('WARNING: Cannot check cloud bucket: ' || SQLERRM);
|
||||
END;
|
||||
END;
|
||||
/
|
||||
|
||||
-- Check 4: Verify rollback logs were created
|
||||
PROMPT Checking rollback operation logs...
|
||||
DECLARE
|
||||
vRollbackLogs NUMBER := 0;
|
||||
BEGIN
|
||||
SELECT COUNT(*)
|
||||
INTO vRollbackLogs
|
||||
FROM CT_MRDS.A_PROCESS_LOG
|
||||
WHERE PROCESS_NAME = 'MARS-956-ROLLBACK'
|
||||
AND LOG_TIMESTAMP >= SYSDATE - 1/24; -- Last hour
|
||||
|
||||
IF vRollbackLogs > 0 THEN
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: Rollback operation logs found: ' || vRollbackLogs);
|
||||
|
||||
-- Show recent rollback logs
|
||||
FOR rec IN (
|
||||
SELECT TO_CHAR(LOG_TIMESTAMP, 'HH24:MI:SS') AS LOG_TIME,
|
||||
PROCEDURE_NAME,
|
||||
LOG_LEVEL,
|
||||
SUBSTR(LOG_MESSAGE, 1, 50) AS MESSAGE
|
||||
FROM CT_MRDS.A_PROCESS_LOG
|
||||
WHERE PROCESS_NAME = 'MARS-956-ROLLBACK'
|
||||
AND LOG_TIMESTAMP >= SYSDATE - 1/24
|
||||
ORDER BY LOG_TIMESTAMP DESC
|
||||
) LOOP
|
||||
DBMS_OUTPUT.PUT_LINE(' ' || rec.LOG_TIME || ' [' || rec.LOG_LEVEL || '] ' ||
|
||||
rec.PROCEDURE_NAME || ': ' || rec.MESSAGE);
|
||||
END LOOP;
|
||||
ELSE
|
||||
DBMS_OUTPUT.PUT_LINE('WARNING: Warning: No rollback operation logs found');
|
||||
END IF;
|
||||
END;
|
||||
/
|
||||
|
||||
PROMPT
|
||||
PROMPT =========================================================================
|
||||
PROMPT MARS-956 Rollback Verification Summary
|
||||
PROMPT =========================================================================
|
||||
|
||||
DECLARE
|
||||
vRemainingFiles NUMBER := 0;
|
||||
vRemainingLogs NUMBER := 0;
|
||||
vRollbackStatus VARCHAR2(20);
|
||||
BEGIN
|
||||
-- Count remaining registrations
|
||||
SELECT COUNT(*)
|
||||
INTO vRemainingFiles
|
||||
FROM CT_MRDS.A_SOURCE_FILE_RECEIVED
|
||||
WHERE SOURCE_FILE_NAME LIKE '200%'
|
||||
AND RECEPTION_DATE >= SYSDATE - 7;
|
||||
|
||||
-- Count remaining process logs
|
||||
SELECT COUNT(*)
|
||||
INTO vRemainingLogs
|
||||
FROM CT_MRDS.A_PROCESS_LOG
|
||||
WHERE PROCESS_NAME = 'MARS-956'
|
||||
AND LOG_TIMESTAMP >= SYSDATE - 7;
|
||||
|
||||
-- Determine rollback status
|
||||
IF vRemainingFiles = 0 AND vRemainingLogs = 0 THEN
|
||||
vRollbackStatus := 'COMPLETE';
|
||||
ELSIF vRemainingFiles = 0 OR vRemainingLogs = 0 THEN
|
||||
vRollbackStatus := 'PARTIAL';
|
||||
ELSE
|
||||
vRollbackStatus := 'INCOMPLETE';
|
||||
END IF;
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('MARS-956 Rollback Status: ' || vRollbackStatus);
|
||||
DBMS_OUTPUT.PUT_LINE('- Remaining file registrations: ' || vRemainingFiles);
|
||||
DBMS_OUTPUT.PUT_LINE('- Remaining process logs: ' || vRemainingLogs);
|
||||
|
||||
IF vRollbackStatus = 'COMPLETE' THEN
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: Rollback completed successfully - system clean');
|
||||
ELSE
|
||||
DBMS_OUTPUT.PUT_LINE('WARNING: Rollback incomplete - manual cleanup may be required');
|
||||
END IF;
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('');
|
||||
DBMS_OUTPUT.PUT_LINE('Note: Cloud bucket files (OCI) are not automatically removed');
|
||||
DBMS_OUTPUT.PUT_LINE(' Use OCI console or DBMS_CLOUD commands for file deletion if needed');
|
||||
END;
|
||||
/
|
||||
|
||||
PROMPT =========================================================================
|
||||
PROMPT Rollback Verification Completed
|
||||
PROMPT =========================================================================
|
||||
@@ -1,68 +0,0 @@
|
||||
# MARS-956: Exporting Historical data for ODS: C2D MPEC (delta)
|
||||
|
||||
## Overview
|
||||
|
||||
**Purpose**: One-time export of historical C2D MPEC delta data from operational database (OU_C2D) to DATA bucket as CSV files.
|
||||
|
||||
**Approach**: Use DATA_EXPORTER export functionality EXPORT_TABLE_DATA for bulk data movement with file registration.
|
||||
|
||||
**Input**: Old tables in OU_C2D operational database
|
||||
**Output**: CSV files in DATA bucket
|
||||
**Mapping**: Structure must match new ODS template tables
|
||||
|
||||
## Tables to Export
|
||||
|
||||
| Source Table (OU_C2D) | Target Location (DATA) | Export Type | Time Dependency |
|
||||
|------------------------|-------------------------|-------------|------------------|
|
||||
| `MPEC_ADMIN` | `mrds_data_dev/DATA/C2D/C2D_MPEC_ADMIN` | CSV to DATA | Sync with REL_02 |
|
||||
| `MPEC_CONTENT` | `mrds_data_dev/DATA/C2D/C2D_MPEC_CONTENT` | CSV to DATA | Sync with REL_02 |
|
||||
| `MPEC_CONTENT_CRITERION` | `mrds_data_dev/DATA/C2D/C2D_MPEC_CONTENT_CRITERION` | CSV to DATA | Sync with REL_02 |
|
||||
|
||||
## Export Strategy
|
||||
|
||||
- **Format**: CSV files in DATA bucket
|
||||
- **Reason**: Complete history of delta records needed for all queries
|
||||
- **Method**: `DATA_EXPORTER.EXPORT_TABLE_DATA` procedure
|
||||
- **Bucket Area**: `'DATA'`
|
||||
- **Folder Structure**: `'DATA/C2D/{TABLE_NAME}'`
|
||||
- **File Registration**: Files registered in A_SOURCE_FILE_RECEIVED table
|
||||
|
||||
## Installation Steps
|
||||
|
||||
1. Run master install script: `@install_mars956.sql`
|
||||
2. Verify exports completed successfully
|
||||
3. Confirm CSV files created in DATA bucket with expected structure
|
||||
|
||||
## Files Structure
|
||||
|
||||
```
|
||||
MARS-956/
|
||||
├── README.md # This file
|
||||
├── install_mars956.sql # Master installation script
|
||||
├── 01_MARS_956_export_c2d_mpec_data.sql # Export procedures execution
|
||||
├── track_package_versions.sql # Universal version tracking
|
||||
├── verify_packages_version.sql # Universal version verification
|
||||
└── rollback_mars956.sql # Rollback script (if needed)
|
||||
```
|
||||
|
||||
## Prerequisites
|
||||
|
||||
- OU_C2D schema access for source tables
|
||||
- DATA_EXPORTER package v2.7.5+ deployed (with pRegisterExport support)
|
||||
- DEF_CRED_ARN credentials configured
|
||||
- DATA bucket accessible
|
||||
|
||||
## Post-Installation Verification
|
||||
|
||||
1. Check export completion in A_PROCESS_LOG
|
||||
2. Verify CSV files created in DATA bucket
|
||||
3. Validate file structure matches template tables
|
||||
4. Confirm row counts match source tables
|
||||
5. Check file registration in A_SOURCE_FILE_RECEIVED table
|
||||
|
||||
## Notes
|
||||
|
||||
- This is a **one-time** data migration
|
||||
- No package modifications required (uses existing DATA_EXPORTER)
|
||||
- Export timing critical - must sync with REL_02 deployment
|
||||
- Complete history required for delta queries
|
||||
@@ -1,128 +1,88 @@
|
||||
-- ===================================================================
|
||||
-- MARS-956 MASTER INSTALLATION SCRIPT
|
||||
-- MARS-956 INSTALL SCRIPT: C2D MPEC Data Export to External Tables
|
||||
-- ===================================================================
|
||||
-- Purpose: Export Historical C2D MPEC data from OU_C2D to DATA bucket
|
||||
-- Purpose: One-time bulk export of 3 C2D MPEC tables from OU_LEGACY_C2D schema
|
||||
-- to OCI buckets (ODS bucket CSV format)
|
||||
-- Uses DATA_EXPORTER v2.7.5 with pRegisterExport for file registration
|
||||
-- Author: Grzegorz Michalski
|
||||
-- Date: 2026-02-11
|
||||
--
|
||||
-- Requirements:
|
||||
-- - ADMIN user access for MARS installation
|
||||
-- - OU_C2D schema access for source tables
|
||||
-- - DATA_EXPORTER package v2.7.4+ deployed
|
||||
-- - DEF_CRED_ARN credentials configured
|
||||
-- - DATA bucket accessible
|
||||
-- ===================================================================
|
||||
-- Date: 2026-02-12
|
||||
|
||||
-- Dynamic spool file generation
|
||||
-- Dynamic spool file generation (using SYS_CONTEXT - no DBA privileges required)
|
||||
-- Log files are automatically created in log/ subdirectory
|
||||
-- IMPORTANT: Ensure log/ directory exists before SPOOL (use host mkdir)
|
||||
host mkdir log 2>nul
|
||||
define spoolfile = 'log\install_mars956_'
|
||||
define timestamp = ''
|
||||
|
||||
-- Get current timestamp for unique log filename
|
||||
column current_time new_value timestamp
|
||||
SELECT TO_CHAR(SYSDATE, 'YYYYMMDD_HH24MISS') AS current_time FROM dual;
|
||||
var filename VARCHAR2(100)
|
||||
BEGIN
|
||||
:filename := 'log/INSTALL_MARS_956_' || SYS_CONTEXT('USERENV', 'CON_NAME') || '_' || TO_CHAR(SYSDATE,'YYYYMMDD_HH24MISS') || '.log';
|
||||
END;
|
||||
/
|
||||
column filename new_value _filename
|
||||
select :filename filename from dual;
|
||||
spool &_filename
|
||||
|
||||
-- Start logging
|
||||
spool &spoolfile.×tamp..log
|
||||
SET ECHO OFF
|
||||
SET TIMING ON
|
||||
SET SERVEROUTPUT ON SIZE UNLIMITED
|
||||
SET PAUSE OFF
|
||||
|
||||
-- Display environment information
|
||||
PROMPT =========================================================================
|
||||
PROMPT MARS-956 INSTALLATION - Export Historical C2D MPEC Data
|
||||
PROMPT =========================================================================
|
||||
PROMPT Installation Start:
|
||||
SELECT TO_CHAR(SYSDATE, 'YYYY-MM-DD HH24:MI:SS') AS INSTALL_START FROM DUAL;
|
||||
|
||||
PROMPT Current User:
|
||||
SELECT USER AS CURRENT_USER FROM DUAL;
|
||||
|
||||
PROMPT Database Info:
|
||||
SELECT INSTANCE_NAME, VERSION, STATUS FROM V$INSTANCE;
|
||||
-- Set current schema context (optional - use when modifying packages in specific schema)
|
||||
-- ALTER SESSION SET CURRENT_SCHEMA = CT_MRDS;
|
||||
|
||||
PROMPT =========================================================================
|
||||
PROMPT Installation Details:
|
||||
PROMPT - Purpose: One-time export of historical C2D MPEC delta data
|
||||
PROMPT - Source: OU_C2D schema tables (operational database)
|
||||
PROMPT - Target: DATA bucket as CSV files
|
||||
PROMPT - Tables: MPEC_ADMIN, MPEC_CONTENT, MPEC_CONTENT_CRITERION
|
||||
PROMPT - Method: DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE
|
||||
PROMPT MARS-956: C2D MPEC Data Export to External Tables (One-Time Migration)
|
||||
PROMPT =========================================================================
|
||||
PROMPT
|
||||
PROMPT This script will export 3 C2D MPEC tables to OCI buckets:
|
||||
PROMPT
|
||||
PROMPT TARGET: ODS Bucket (CSV format):
|
||||
PROMPT - MPEC_ADMIN
|
||||
PROMPT - MPEC_CONTENT
|
||||
PROMPT - MPEC_CONTENT_CRITERION
|
||||
PROMPT
|
||||
PROMPT Key Features:
|
||||
PROMPT - Files registered in A_SOURCE_FILE_RECEIVED for tracking
|
||||
PROMPT - Template table column order matching (CT_ET_TEMPLATES.C2D_MPEC_*)
|
||||
PROMPT - ODS/C2D bucket path structure
|
||||
PROMPT =========================================================================
|
||||
|
||||
SET SERVEROUTPUT ON SIZE 1000000
|
||||
SET LINESIZE 200
|
||||
SET PAGESIZE 1000
|
||||
-- Confirm installation with user
|
||||
ACCEPT continue CHAR PROMPT 'Type YES to continue with installation, or Ctrl+C to abort: '
|
||||
WHENEVER SQLERROR EXIT SQL.SQLCODE
|
||||
BEGIN
|
||||
IF '&continue' IS NULL OR TRIM('&continue') IS NULL OR UPPER(TRIM('&continue')) != 'YES' THEN
|
||||
RAISE_APPLICATION_ERROR(-20001, 'Installation aborted by user');
|
||||
END IF;
|
||||
END;
|
||||
/
|
||||
WHENEVER SQLERROR CONTINUE
|
||||
|
||||
PROMPT
|
||||
PROMPT Step 1: Verify Prerequisites
|
||||
PROMPT =========================================================================
|
||||
|
||||
-- Verify DATA_EXPORTER package is available
|
||||
PROMPT Checking DATA_EXPORTER package availability...
|
||||
SELECT 'DATA_EXPORTER v' || CT_MRDS.DATA_EXPORTER.PACKAGE_VERSION ||
|
||||
' (Build: ' || CT_MRDS.DATA_EXPORTER.PACKAGE_BUILD_DATE || ')' AS PACKAGE_INFO
|
||||
FROM DUAL;
|
||||
|
||||
-- Verify source tables exist in OU_C2D
|
||||
PROMPT Checking source tables in OU_C2D schema...
|
||||
SELECT table_name, num_rows
|
||||
FROM all_tables
|
||||
WHERE owner = 'OU_C2D'
|
||||
AND table_name IN ('MPEC_ADMIN', 'MPEC_CONTENT', 'MPEC_CONTENT_CRITERION')
|
||||
ORDER BY table_name;
|
||||
|
||||
-- Verify template tables exist in CT_ET_TEMPLATES
|
||||
PROMPT Checking template tables in CT_ET_TEMPLATES schema...
|
||||
SELECT table_name
|
||||
FROM all_tables
|
||||
WHERE owner = 'CT_ET_TEMPLATES'
|
||||
AND table_name IN ('C2D_MPEC_ADMIN', 'C2D_MPEC_CONTENT', 'C2D_MPEC_CONTENT_CRITERION')
|
||||
ORDER BY table_name;
|
||||
|
||||
PROMPT
|
||||
PROMPT Step 2: Execute Historical Data Export
|
||||
PROMPT Step 1: Export C2D MPEC Data to ODS Bucket
|
||||
PROMPT =========================================================================
|
||||
@@01_MARS_956_export_c2d_mpec_data.sql
|
||||
|
||||
PROMPT
|
||||
PROMPT Step 3: Track Package Versions
|
||||
PROMPT =========================================================================
|
||||
@@track_package_versions.sql
|
||||
|
||||
PROMPT
|
||||
PROMPT Step 4: Verify Package Versions
|
||||
PROMPT Step 2: Verify Exports (File Registration Check)
|
||||
PROMPT =========================================================================
|
||||
@@verify_packages_version.sql
|
||||
@@02_MARS_956_verify_exports.sql
|
||||
|
||||
PROMPT
|
||||
PROMPT =========================================================================
|
||||
PROMPT MARS-956 INSTALLATION SUMMARY
|
||||
PROMPT Step 3: Verify Data Integrity (Source vs Exported)
|
||||
PROMPT =========================================================================
|
||||
|
||||
-- Display final summary
|
||||
PROMPT Installation Completed:
|
||||
SELECT TO_CHAR(SYSDATE, 'YYYY-MM-DD HH24:MI:SS') AS INSTALL_END FROM DUAL;
|
||||
|
||||
PROMPT Export Results Summary:
|
||||
SELECT COUNT(*) AS EXPORT_LOG_ENTRIES,
|
||||
MIN(EVENT_TIMESTAMP) AS FIRST_EXPORT,
|
||||
MAX(EVENT_TIMESTAMP) AS LAST_EXPORT
|
||||
FROM CT_MRDS.A_PROCESS_LOG
|
||||
WHERE PACKAGE_NAME = 'MARS-956'
|
||||
AND EVENT_TIMESTAMP >= SYSDATE - 1; -- Last 24 hours
|
||||
@@03_MARS_956_verify_data_integrity.sql
|
||||
|
||||
PROMPT
|
||||
PROMPT =========================================================================
|
||||
PROMPT POST-INSTALLATION TASKS
|
||||
PROMPT MARS-956 Installation - COMPLETED
|
||||
PROMPT =========================================================================
|
||||
PROMPT 1. Verify CSV files created in DATA bucket:
|
||||
PROMPT - mrds_data_dev/ODS/C2D/C2D_MPEC_ADMIN/*.csv
|
||||
PROMPT - mrds_data_dev/ODS/C2D/C2D_MPEC_CONTENT/*.csv
|
||||
PROMPT - mrds_data_dev/ODS/C2D/C2D_MPEC_CONTENT_CRITERION/*.csv
|
||||
PROMPT
|
||||
PROMPT 2. Check file structure matches template tables
|
||||
PROMPT 3. Validate row counts match source tables
|
||||
PROMPT 4. Confirm data available for delta queries
|
||||
PROMPT 5. Sync deployment timing with REL_02 deployment
|
||||
PROMPT Check the log file for complete installation details.
|
||||
PROMPT For rollback, use: rollback_mars956.sql
|
||||
PROMPT =========================================================================
|
||||
|
||||
spool off
|
||||
|
||||
quit;
|
||||
@@ -1,85 +1,81 @@
|
||||
-- ===================================================================
|
||||
-- MARS-956 ROLLBACK SCRIPT
|
||||
-- MARS-956 ROLLBACK SCRIPT: C2D MPEC Data Export Rollback
|
||||
-- ===================================================================
|
||||
-- Purpose: Rollback/cleanup for MARS-956 C2D MPEC historical data export
|
||||
-- Purpose: Rollback MARS-956 - Delete exported CSV files and file registrations
|
||||
-- WARNING: This will DELETE all exported data files and registrations!
|
||||
-- Author: Grzegorz Michalski
|
||||
-- Date: 2026-02-11
|
||||
--
|
||||
-- NOTE: This is primarily for cleanup of log entries and tracking data.
|
||||
-- The exported CSV files would need to be manually removed from
|
||||
-- the DATA bucket if rollback is required.
|
||||
-- ===================================================================
|
||||
-- Date: 2026-02-12
|
||||
|
||||
-- Start logging
|
||||
spool rollback_mars956.log
|
||||
-- Dynamic spool file generation (using SYS_CONTEXT - no DBA privileges required)
|
||||
-- IMPORTANT: Ensure log/ directory exists before SPOOL (use host mkdir)
|
||||
host mkdir log 2>nul
|
||||
|
||||
var filename VARCHAR2(100)
|
||||
BEGIN
|
||||
:filename := 'log/ROLLBACK_MARS_956_' || SYS_CONTEXT('USERENV', 'CON_NAME') || '_' || TO_CHAR(SYSDATE,'YYYYMMDD_HH24MISS') || '.log';
|
||||
END;
|
||||
/
|
||||
column filename new_value _filename
|
||||
select :filename filename from dual;
|
||||
spool &_filename
|
||||
|
||||
SET ECHO OFF
|
||||
SET TIMING ON
|
||||
SET SERVEROUTPUT ON SIZE UNLIMITED
|
||||
SET PAUSE OFF
|
||||
|
||||
PROMPT =========================================================================
|
||||
PROMPT MARS-956 ROLLBACK - Cleanup Historical C2D MPEC Export
|
||||
PROMPT MARS-956: Rollback C2D MPEC Data Export
|
||||
PROMPT =========================================================================
|
||||
PROMPT Rollback Start:
|
||||
SELECT TO_CHAR(SYSDATE, 'YYYY-MM-DD HH24:MI:SS') AS ROLLBACK_START FROM DUAL;
|
||||
|
||||
SET SERVEROUTPUT ON SIZE 1000000
|
||||
|
||||
PROMPT WARNING: This will DELETE exported CSV files and file registrations!
|
||||
PROMPT - ODS bucket: mrds_data_dev/ODS/C2D/
|
||||
PROMPT - File registrations: A_SOURCE_FILE_RECEIVED entries
|
||||
PROMPT
|
||||
PROMPT Step 1: Review Export Activity
|
||||
PROMPT Only proceed if export failed and needs to be restarted!
|
||||
PROMPT =========================================================================
|
||||
|
||||
-- Show what was exported
|
||||
PROMPT Recent MARS-956 export activity:
|
||||
SELECT TO_CHAR(EVENT_TIMESTAMP, 'YYYY-MM-DD HH24:MI:SS') AS EXPORT_TIME,
|
||||
PROCEDURE_NAME,
|
||||
EVENT_TYPE,
|
||||
EVENT_MESSAGE
|
||||
FROM CT_MRDS.A_PROCESS_LOG
|
||||
WHERE PACKAGE_NAME = 'MARS-956'
|
||||
OR PROCEDURE_NAME LIKE '%MARS_956%'
|
||||
ORDER BY EVENT_TIMESTAMP DESC;
|
||||
|
||||
PROMPT
|
||||
PROMPT Step 2: Cleanup Log Entries (Optional)
|
||||
PROMPT =========================================================================
|
||||
|
||||
-- Optionally remove MARS-956 log entries (uncomment if needed)
|
||||
/*
|
||||
DELETE FROM CT_MRDS.A_PROCESS_LOG
|
||||
WHERE PACKAGE_NAME = 'MARS-956'
|
||||
OR PROCEDURE_NAME LIKE '%MARS_956%';
|
||||
|
||||
PROMPT Deleted log entries:
|
||||
SELECT SQL%ROWCOUNT AS DELETED_ROWS FROM DUAL;
|
||||
|
||||
COMMIT;
|
||||
*/
|
||||
|
||||
PROMPT Log cleanup skipped (uncomment DELETE statement if cleanup needed)
|
||||
|
||||
PROMPT
|
||||
PROMPT Step 3: Manual Steps Required
|
||||
PROMPT =========================================================================
|
||||
|
||||
PROMPT ⚠️ MANUAL CLEANUP REQUIRED:
|
||||
PROMPT
|
||||
PROMPT If complete rollback is needed, manually remove CSV files from DATA bucket:
|
||||
PROMPT - mrds_data_dev/ODS/C2D/C2D_MPEC_ADMIN/*.csv
|
||||
PROMPT - mrds_data_dev/ODS/C2D/C2D_MPEC_CONTENT/*.csv
|
||||
PROMPT - mrds_data_dev/ODS/C2D/C2D_MPEC_CONTENT_CRITERION/*.csv
|
||||
PROMPT
|
||||
PROMPT Use OCI CLI or console to remove files:
|
||||
PROMPT oci os object list --bucket-name mrds_data_dev --prefix "ODS/C2D/C2D_MPEC"
|
||||
PROMPT oci os object delete --bucket-name mrds_data_dev --name "path/to/file.csv"
|
||||
-- Confirm rollback with user
|
||||
ACCEPT continue CHAR PROMPT 'Type YES to continue with rollback, or Ctrl+C to abort: '
|
||||
WHENEVER SQLERROR EXIT SQL.SQLCODE
|
||||
BEGIN
|
||||
IF '&continue' IS NULL OR TRIM('&continue') IS NULL OR UPPER(TRIM('&continue')) != 'YES' THEN
|
||||
RAISE_APPLICATION_ERROR(-20001, 'Rollback aborted by user');
|
||||
END IF;
|
||||
END;
|
||||
/
|
||||
WHENEVER SQLERROR CONTINUE
|
||||
|
||||
PROMPT
|
||||
PROMPT =========================================================================
|
||||
PROMPT MARS-956 ROLLBACK SUMMARY
|
||||
PROMPT Step 1: Delete Exported CSV Files from DATA Bucket
|
||||
PROMPT =========================================================================
|
||||
|
||||
PROMPT Rollback Completed:
|
||||
SELECT TO_CHAR(SYSDATE, 'YYYY-MM-DD HH24:MI:SS') AS ROLLBACK_END FROM DUAL;
|
||||
@@90_MARS_956_rollback_delete_csv_files.sql
|
||||
|
||||
PROMPT
|
||||
PROMPT Note: This rollback script primarily cleans up log entries.
|
||||
PROMPT Exported CSV files require manual removal from DATA bucket.
|
||||
PROMPT =========================================================================
|
||||
PROMPT Step 2: Delete File Registrations
|
||||
PROMPT =========================================================================
|
||||
@@91_MARS_956_rollback_file_registrations.sql
|
||||
|
||||
PROMPT
|
||||
PROMPT =========================================================================
|
||||
PROMPT Step 3: Clean Process Logs
|
||||
PROMPT =========================================================================
|
||||
@@92_MARS_956_rollback_process_logs.sql
|
||||
|
||||
PROMPT
|
||||
PROMPT =========================================================================
|
||||
PROMPT Step 4: Verify Rollback Completion
|
||||
PROMPT =========================================================================
|
||||
@@99_MARS_956_verify_rollback.sql
|
||||
|
||||
PROMPT
|
||||
PROMPT =========================================================================
|
||||
PROMPT MARS-956 Rollback - COMPLETED
|
||||
PROMPT =========================================================================
|
||||
PROMPT Check the log file for complete rollback details.
|
||||
PROMPT =========================================================================
|
||||
|
||||
spool off
|
||||
|
||||
quit;
|
||||
@@ -1,96 +0,0 @@
|
||||
-- ===================================================================
|
||||
-- Simple Package Version Tracking Script
|
||||
-- ===================================================================
|
||||
-- Purpose: Track specified Oracle package versions for MARS-956
|
||||
-- Author: Grzegorz Michalski
|
||||
-- Date: 2026-02-11
|
||||
-- Version: 3.1.0 - List-Based Edition
|
||||
--
|
||||
-- USAGE:
|
||||
-- 1. Edit package list below (add/remove packages as needed)
|
||||
-- 2. Include in your install/rollback script: @@track_package_versions.sql
|
||||
-- ===================================================================
|
||||
|
||||
SET SERVEROUTPUT ON;
|
||||
|
||||
DECLARE
|
||||
TYPE t_package_rec IS RECORD (
|
||||
owner VARCHAR2(50),
|
||||
package_name VARCHAR2(50),
|
||||
version VARCHAR2(50)
|
||||
);
|
||||
TYPE t_packages IS TABLE OF t_package_rec;
|
||||
TYPE t_string_array IS TABLE OF VARCHAR2(100);
|
||||
|
||||
-- ===================================================================
|
||||
-- PACKAGE LIST - Edit this array to specify packages to track
|
||||
-- ===================================================================
|
||||
-- MARS-956: Historical C2D MPEC data export - using existing packages
|
||||
-- No new packages created, tracking existing DATA_EXPORTER usage
|
||||
-- ===================================================================
|
||||
vPackageList t_string_array := t_string_array(
|
||||
'CT_MRDS.DATA_EXPORTER'
|
||||
);
|
||||
-- ===================================================================
|
||||
|
||||
vPackages t_packages := t_packages();
|
||||
vVersion VARCHAR2(50);
|
||||
vCount NUMBER := 0;
|
||||
vOwner VARCHAR2(50);
|
||||
vPackageName VARCHAR2(50);
|
||||
vDotPos NUMBER;
|
||||
BEGIN
|
||||
DBMS_OUTPUT.PUT_LINE('========================================');
|
||||
DBMS_OUTPUT.PUT_LINE('MARS-956: Package Version Tracking');
|
||||
DBMS_OUTPUT.PUT_LINE('========================================');
|
||||
|
||||
-- Process each package in the list
|
||||
FOR i IN 1..vPackageList.COUNT LOOP
|
||||
vDotPos := INSTR(vPackageList(i), '.');
|
||||
IF vDotPos > 0 THEN
|
||||
vOwner := SUBSTR(vPackageList(i), 1, vDotPos - 1);
|
||||
vPackageName := SUBSTR(vPackageList(i), vDotPos + 1);
|
||||
|
||||
-- Get package version
|
||||
BEGIN
|
||||
EXECUTE IMMEDIATE 'SELECT ' || vOwner || '.' || vPackageName || '.GET_VERSION() FROM DUAL' INTO vVersion;
|
||||
vPackages.EXTEND;
|
||||
vPackages(vPackages.COUNT).owner := vOwner;
|
||||
vPackages(vPackages.COUNT).package_name := vPackageName;
|
||||
vPackages(vPackages.COUNT).version := vVersion;
|
||||
|
||||
-- Track in ENV_MANAGER
|
||||
BEGIN
|
||||
CT_MRDS.ENV_MANAGER.TRACK_PACKAGE_VERSION(
|
||||
pPackageOwner => vOwner,
|
||||
pPackageName => vPackageName,
|
||||
pPackageVersion => vVersion,
|
||||
pPackageBuildDate => TO_CHAR(SYSDATE, 'YYYY-MM-DD HH24:MI:SS'),
|
||||
pPackageAuthor => 'Grzegorz Michalski'
|
||||
);
|
||||
vCount := vCount + 1;
|
||||
EXCEPTION
|
||||
WHEN OTHERS THEN NULL; -- Continue even if tracking fails
|
||||
END;
|
||||
EXCEPTION
|
||||
WHEN OTHERS THEN NULL; -- Skip packages that fail
|
||||
END;
|
||||
END IF;
|
||||
END LOOP;
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('');
|
||||
DBMS_OUTPUT.PUT_LINE('Summary:');
|
||||
DBMS_OUTPUT.PUT_LINE('--------');
|
||||
DBMS_OUTPUT.PUT_LINE('Packages tracked: ' || vCount || '/' || vPackageList.COUNT);
|
||||
|
||||
IF vPackages.COUNT > 0 THEN
|
||||
DBMS_OUTPUT.PUT_LINE('');
|
||||
DBMS_OUTPUT.PUT_LINE('Tracked Packages:');
|
||||
FOR i IN 1..vPackages.COUNT LOOP
|
||||
DBMS_OUTPUT.PUT_LINE(' ' || vPackages(i).owner || '.' || vPackages(i).package_name || ' v' || vPackages(i).version);
|
||||
END LOOP;
|
||||
END IF;
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('========================================');
|
||||
END;
|
||||
/
|
||||
@@ -1,182 +0,0 @@
|
||||
-- ===================================================================
|
||||
-- MARS-956 POST-EXPORT VALIDATION SCRIPT
|
||||
-- ===================================================================
|
||||
-- Purpose: Validate C2D MPEC historical data export results
|
||||
-- Author: Grzegorz Michalski
|
||||
-- Date: 2026-02-11
|
||||
--
|
||||
-- Run after MARS-956 installation to verify export success
|
||||
-- ===================================================================
|
||||
|
||||
SET LINESIZE 200
|
||||
SET PAGESIZE 1000
|
||||
SET SERVEROUTPUT ON SIZE 1000000
|
||||
|
||||
PROMPT =========================================================================
|
||||
PROMPT MARS-956 POST-EXPORT VALIDATION
|
||||
PROMPT =========================================================================
|
||||
PROMPT Validation Start:
|
||||
SELECT TO_CHAR(SYSDATE, 'YYYY-MM-DD HH24:MI:SS') AS VALIDATION_START FROM DUAL;
|
||||
|
||||
PROMPT
|
||||
PROMPT 1. Export Process Log Review
|
||||
PROMPT =========================================================================
|
||||
|
||||
-- Check export completion status
|
||||
PROMPT Recent MARS-956 export activity:
|
||||
SELECT TO_CHAR(EVENT_TIMESTAMP, 'YYYY-MM-DD HH24:MI:SS') AS EVENT_TIME,
|
||||
PROCEDURE_NAME,
|
||||
EVENT_TYPE,
|
||||
SUBSTR(EVENT_MESSAGE, 1, 80) AS MESSAGE_PREVIEW
|
||||
FROM CT_MRDS.A_PROCESS_LOG
|
||||
WHERE PACKAGE_NAME = 'MARS-956'
|
||||
OR PROCEDURE_NAME LIKE '%MARS_956%'
|
||||
OR PROCEDURE_NAME LIKE '%DATA_EXPORTER%'
|
||||
ORDER BY EVENT_TIMESTAMP DESC
|
||||
FETCH FIRST 20 ROWS ONLY;
|
||||
|
||||
-- Check for any errors
|
||||
PROMPT Export errors (if any):
|
||||
SELECT TO_CHAR(EVENT_TIMESTAMP, 'YYYY-MM-DD HH24:MI:SS') AS ERROR_TIME,
|
||||
PROCEDURE_NAME,
|
||||
EVENT_MESSAGE
|
||||
FROM CT_MRDS.A_PROCESS_LOG
|
||||
WHERE (PACKAGE_NAME = 'MARS-956' OR PROCEDURE_NAME LIKE '%MARS_956%')
|
||||
AND EVENT_TYPE = 'ERROR'
|
||||
AND EVENT_TIMESTAMP >= SYSDATE - 1; -- Last 24 hours
|
||||
|
||||
PROMPT
|
||||
PROMPT 2. Source Table Row Counts
|
||||
PROMPT =========================================================================
|
||||
|
||||
-- Get source table counts for comparison
|
||||
PROMPT Source table row counts (OU_C2D):
|
||||
SELECT 'OU_C2D' AS SCHEMA_NAME,
|
||||
table_name,
|
||||
num_rows,
|
||||
TO_CHAR(last_analyzed, 'YYYY-MM-DD HH24:MI:SS') AS STATS_DATE
|
||||
FROM all_tables
|
||||
WHERE owner = 'OU_C2D'
|
||||
AND table_name IN ('MPEC_ADMIN', 'MPEC_CONTENT', 'MPEC_CONTENT_CRITERION')
|
||||
ORDER BY table_name;
|
||||
|
||||
PROMPT
|
||||
PROMPT 3. Template Table Structure Verification
|
||||
PROMPT =========================================================================
|
||||
|
||||
-- Verify template tables exist and have proper structure
|
||||
PROMPT Template tables in CT_ET_TEMPLATES:
|
||||
SELECT table_name,
|
||||
num_rows,
|
||||
TO_CHAR(last_analyzed, 'YYYY-MM-DD HH24:MI:SS') AS STATS_DATE
|
||||
FROM all_tables
|
||||
WHERE owner = 'CT_ET_TEMPLATES'
|
||||
AND table_name IN ('C2D_MPEC_ADMIN', 'C2D_MPEC_CONTENT', 'C2D_MPEC_CONTENT_CRITERION')
|
||||
ORDER BY table_name;
|
||||
|
||||
PROMPT
|
||||
PROMPT Template table column counts:
|
||||
SELECT owner, table_name, COUNT(*) AS COLUMN_COUNT
|
||||
FROM all_tab_columns
|
||||
WHERE owner IN ('OU_C2D', 'CT_ET_TEMPLATES')
|
||||
AND ((owner = 'OU_C2D' AND table_name IN ('MPEC_ADMIN', 'MPEC_CONTENT', 'MPEC_CONTENT_CRITERION'))
|
||||
OR (owner = 'CT_ET_TEMPLATES' AND table_name IN ('C2D_MPEC_ADMIN', 'C2D_MPEC_CONTENT', 'C2D_MPEC_CONTENT_CRITERION')))
|
||||
GROUP BY owner, table_name
|
||||
ORDER BY table_name, owner;
|
||||
|
||||
PROMPT
|
||||
PROMPT 4. File Registration Validation
|
||||
PROMPT =========================================================================
|
||||
|
||||
-- Check if exported files were registered in A_SOURCE_FILE_RECEIVED
|
||||
PROMPT Registered export files (last 24 hours):
|
||||
SELECT A_SOURCE_FILE_RECEIVED_KEY,
|
||||
A_SOURCE_FILE_CONFIG_KEY,
|
||||
SOURCE_FILE_NAME,
|
||||
ROUND(BYTES/1024, 2) AS SIZE_KB,
|
||||
PROCESSING_STATUS,
|
||||
TO_CHAR(RECEPTION_DATE, 'YYYY-MM-DD HH24:MI:SS') AS REGISTERED_TIME
|
||||
FROM CT_MRDS.A_SOURCE_FILE_RECEIVED
|
||||
WHERE RECEPTION_DATE >= SYSDATE - 1 -- Last 24 hours
|
||||
AND (SOURCE_FILE_NAME LIKE '%MPEC_%' OR A_SOURCE_FILE_CONFIG_KEY IN (
|
||||
SELECT A_SOURCE_FILE_CONFIG_KEY
|
||||
FROM CT_MRDS.A_SOURCE_FILE_CONFIG
|
||||
WHERE A_SOURCE_KEY = 'C2D' AND TABLE_ID LIKE '%MPEC%'
|
||||
))
|
||||
ORDER BY RECEPTION_DATE DESC;
|
||||
|
||||
-- Count registered files per config key
|
||||
PROMPT File registration summary:
|
||||
SELECT
|
||||
CASE WHEN A_SOURCE_FILE_CONFIG_KEY = -1 THEN 'Default (no config)'
|
||||
ELSE 'Config Key: ' || A_SOURCE_FILE_CONFIG_KEY
|
||||
END AS CONFIG_INFO,
|
||||
COUNT(*) AS REGISTERED_FILES
|
||||
FROM CT_MRDS.A_SOURCE_FILE_RECEIVED
|
||||
WHERE RECEPTION_DATE >= SYSDATE - 1 -- Last 24 hours
|
||||
AND (SOURCE_FILE_NAME LIKE '%MPEC_%' OR A_SOURCE_FILE_CONFIG_KEY IN (
|
||||
SELECT A_SOURCE_FILE_CONFIG_KEY
|
||||
FROM CT_MRDS.A_SOURCE_FILE_CONFIG
|
||||
WHERE A_SOURCE_KEY = 'C2D' AND TABLE_ID LIKE '%MPEC%'
|
||||
))
|
||||
GROUP BY A_SOURCE_FILE_CONFIG_KEY
|
||||
ORDER BY A_SOURCE_FILE_CONFIG_KEY;
|
||||
|
||||
PROMPT
|
||||
PROMPT 5. Export File Validation Commands
|
||||
PROMPT =========================================================================
|
||||
|
||||
PROMPT To validate exported CSV files, use these OCI CLI commands:
|
||||
PROMPT
|
||||
PROMPT # List exported files
|
||||
PROMPT oci os object list --bucket-name mrds_data_dev --prefix "DATA/C2D/C2D_MPEC"
|
||||
PROMPT
|
||||
PROMPT # Check file sizes
|
||||
PROMPT oci os object list --bucket-name mrds_data_dev --prefix "DATA/C2D/C2D_MPEC_ADMIN"
|
||||
PROMPT oci os object list --bucket-name mrds_data_dev --prefix "DATA/C2D/C2D_MPEC_CONTENT"
|
||||
PROMPT oci os object list --bucket-name mrds_data_dev --prefix "DATA/C2D/C2D_MPEC_CONTENT_CRITERION"
|
||||
PROMPT
|
||||
PROMPT # Download sample file for validation
|
||||
PROMPT oci os object get --bucket-name mrds_data_dev --name "DATA/C2D/C2D_MPEC_ADMIN/filename.csv" --file sample.csv
|
||||
|
||||
PROMPT
|
||||
PROMPT 6. Data Quality Checks (Manual)
|
||||
PROMPT =========================================================================
|
||||
|
||||
PROMPT Manual verification steps:
|
||||
PROMPT 1. Download sample CSV files from each folder
|
||||
PROMPT 2. Verify CSV header matches template table columns
|
||||
PROMPT 3. Check data formats (especially dates) match expectations
|
||||
PROMPT 4. Confirm row counts approximately match source tables
|
||||
PROMPT 5. Validate no empty files were created
|
||||
PROMPT 6. Test loading sample data into external tables
|
||||
PROMPT 7. Verify file registration entries in A_SOURCE_FILE_RECEIVED
|
||||
|
||||
PROMPT
|
||||
PROMPT 7. Next Steps for ODS Integration
|
||||
PROMPT =========================================================================
|
||||
|
||||
PROMPT After validation success:
|
||||
PROMPT 1. Configure external tables pointing to CSV files
|
||||
PROMPT 2. Test external table queries
|
||||
PROMPT 3. Setup scheduled data refresh processes (if needed)
|
||||
PROMPT 4. Document file locations and access patterns
|
||||
PROMPT 5. Coordinate with REL_02 deployment timing
|
||||
|
||||
PROMPT
|
||||
PROMPT =========================================================================
|
||||
PROMPT VALIDATION COMPLETED
|
||||
PROMPT =========================================================================
|
||||
PROMPT Validation End:
|
||||
SELECT TO_CHAR(SYSDATE, 'YYYY-MM-DD HH24:MI:SS') AS VALIDATION_END FROM DUAL;
|
||||
|
||||
PROMPT
|
||||
PROMPT Review the output above to confirm:
|
||||
PROMPT ✓ Export processes completed without errors
|
||||
PROMPT ✓ Source table row counts are reasonable
|
||||
PROMPT ✓ Template tables exist and have matching structure
|
||||
PROMPT ✓ Exported files registered in A_SOURCE_FILE_RECEIVED table
|
||||
PROMPT ✓ Manual file validation steps are understood
|
||||
PROMPT
|
||||
PROMPT If any issues found, check export logs and re-run specific exports if needed.
|
||||
PROMPT =========================================================================
|
||||
@@ -1,62 +0,0 @@
|
||||
-- ===================================================================
|
||||
-- Universal Package Version Verification Script
|
||||
-- ===================================================================
|
||||
-- Purpose: Verify all tracked Oracle packages for code changes (MARS-956)
|
||||
-- Author: Grzegorz Michalski
|
||||
-- Date: 2026-02-11
|
||||
-- Version: 1.0.0
|
||||
--
|
||||
-- USAGE:
|
||||
-- Include at the end of install/rollback scripts: @@verify_packages_version.sql
|
||||
--
|
||||
-- OUTPUT:
|
||||
-- - List of all tracked packages with their current status
|
||||
-- - OK: Package has not changed since last tracking
|
||||
-- - WARNING: Package code changed without version update
|
||||
-- ===================================================================
|
||||
|
||||
SET LINESIZE 200
|
||||
SET PAGESIZE 1000
|
||||
SET FEEDBACK OFF
|
||||
|
||||
PROMPT
|
||||
PROMPT ========================================
|
||||
PROMPT MARS-956: Package Version Verification
|
||||
PROMPT ========================================
|
||||
PROMPT
|
||||
|
||||
COLUMN PACKAGE_OWNER FORMAT A15
|
||||
COLUMN PACKAGE_NAME FORMAT A20
|
||||
COLUMN VERSION FORMAT A10
|
||||
COLUMN STATUS FORMAT A80
|
||||
|
||||
SELECT
|
||||
PACKAGE_OWNER,
|
||||
PACKAGE_NAME,
|
||||
PACKAGE_VERSION AS VERSION,
|
||||
CT_MRDS.ENV_MANAGER.CHECK_PACKAGE_CHANGES(PACKAGE_OWNER, PACKAGE_NAME) AS STATUS
|
||||
FROM (
|
||||
SELECT
|
||||
PACKAGE_OWNER,
|
||||
PACKAGE_NAME,
|
||||
PACKAGE_VERSION,
|
||||
ROW_NUMBER() OVER (PARTITION BY PACKAGE_OWNER, PACKAGE_NAME ORDER BY TRACKING_DATE DESC) AS RN
|
||||
FROM CT_MRDS.A_PACKAGE_VERSION_TRACKING
|
||||
)
|
||||
WHERE RN = 1
|
||||
ORDER BY PACKAGE_OWNER, PACKAGE_NAME;
|
||||
|
||||
PROMPT
|
||||
PROMPT ========================================
|
||||
PROMPT MARS-956: Verification Complete
|
||||
PROMPT ========================================
|
||||
PROMPT
|
||||
PROMPT Legend:
|
||||
PROMPT OK - Package has not changed since last tracking
|
||||
PROMPT WARNING - Package code changed without version update
|
||||
PROMPT
|
||||
PROMPT For detailed hash information, use:
|
||||
PROMPT SELECT ENV_MANAGER.GET_PACKAGE_HASH_INFO('OWNER', 'PACKAGE') FROM DUAL;
|
||||
PROMPT ========================================
|
||||
|
||||
SET FEEDBACK ON
|
||||
27
README.md
27
README.md
@@ -42,32 +42,31 @@ sql "ADMIN/Cloudpass#34@ggmichalski_high" "@rollback_mars1056.sql"
|
||||
New-Item -ItemType Directory -Force -Path "log" | Out-Null; Move-Item -Path "*.log" -Destination "log" -Force
|
||||
|
||||
cd .\MARS_Packages\REL02\MARS-1046
|
||||
sql "ADMIN/Cloudpass#34@ggmichalski_high" "@install_mars1046.sql"
|
||||
sql "ADMIN/Cloudpass#34@ggmichalski_high" "@rollback_mars1046.sql"
|
||||
echo 'yes' | sql "ADMIN/Cloudpass#34@ggmichalski_high" "@install_mars1046.sql"
|
||||
echo 'yes' | sql "ADMIN/Cloudpass#34@ggmichalski_high" "@rollback_mars1046.sql"
|
||||
7z a -pMojeSuperHaslo#123 -mhe=on M1046_arch.7z MARS-1046/
|
||||
|
||||
cd .\MARS_Packages\REL01_ADDITIONS\MARS-826-PREHOOK
|
||||
sql "ADMIN/Cloudpass#34@ggmichalski_high" "@install_mars826_prehook.sql"
|
||||
sql "ADMIN/Cloudpass#34@ggmichalski_high" "@rollback_mars826_prehook.sql"
|
||||
echo 'yes' | sql "ADMIN/Cloudpass#34@ggmichalski_high" "@install_mars826_prehook.sql"
|
||||
echo 'yes' | sql "ADMIN/Cloudpass#34@ggmichalski_high" "@rollback_mars826_prehook.sql"
|
||||
7z a -pMojeSuperHaslo#123 -mhe=on M826PH_arch.7z MARS-826-PREHOOK
|
||||
|
||||
cd .\MARS_Packages\REL01_ADDITIONS\MARS-826
|
||||
sql "ADMIN/Cloudpass#34@ggmichalski_high" "@install_mars826.sql"
|
||||
sql "ADMIN/Cloudpass#34@ggmichalski_high" "@rollback_mars826.sql"
|
||||
echo 'yes' | sql "ADMIN/Cloudpass#34@ggmichalski_high" "@install_mars826.sql"
|
||||
echo 'yes' | sql "ADMIN/Cloudpass#34@ggmichalski_high" "@rollback_mars826.sql"
|
||||
7z a -pMojeSuperHaslo#123 -mhe=on M826_arch.7z MARS-826\
|
||||
|
||||
cd .\MARS_Packages\REL01_ADDITIONS\MARS-835
|
||||
sql "ADMIN/Cloudpass#34@ggmichalski_high" "@install_mars835.sql"
|
||||
sql "ADMIN/Cloudpass#34@ggmichalski_high" "@rollback_mars835.sql"
|
||||
echo 'yes' | sql "ADMIN/Cloudpass#34@ggmichalski_high" "@install_mars835.sql"
|
||||
echo 'yes' | sql "ADMIN/Cloudpass#34@ggmichalski_high" "@rollback_mars835.sql"
|
||||
7z a -pMojeSuperHaslo#123 -mhe=on M835_arch.7z MARS-835
|
||||
|
||||
cd .\MARS_Packages\REL01_ADDITIONS\MARS-835-PREHOOK
|
||||
sql "ADMIN/Cloudpass#34@ggmichalski_high" "@install_mars835_prehook.sql"
|
||||
sql "ADMIN/Cloudpass#34@ggmichalski_high" "@rollback_mars835_prehook.sql"
|
||||
echo 'yes' | sql "ADMIN/Cloudpass#34@ggmichalski_high" "@install_mars835_prehook.sql"
|
||||
echo 'yes' | sql "ADMIN/Cloudpass#34@ggmichalski_high" "@rollback_mars835_prehook.sql"
|
||||
7z a -pMojeSuperHaslo#123 -mhe=on M835PH_arch.7z MARS-835-PREHOOK
|
||||
|
||||
|
||||
|
||||
cd .\MARS_Packages\REL03\MARS-1057
|
||||
echo 'yes' | sql "ADMIN/Cloudpass#34@ggmichalski_high" "@install_mars1057.sql"
|
||||
echo 'yes' | sql "ADMIN/Cloudpass#34@ggmichalski_high" "@rollback_mars1057.sql"
|
||||
@@ -78,3 +77,9 @@ cd .\MARS_Packages\REL01_ADDITIONS\MARS-828
|
||||
echo 'yes' | sql "ADMIN/Cloudpass#34@ggmichalski_high" "@install_mars828.sql"
|
||||
echo 'yes' | sql "ADMIN/Cloudpass#34@ggmichalski_high" "@rollback_mars828.sql"
|
||||
7z a -pMojeSuperHaslo#123 -mhe=on M828_arch.7z MARS-828\
|
||||
|
||||
|
||||
cd .\MARS_Packages\REL02_POST\MARS-956
|
||||
echo 'yes' | sql "ADMIN/Cloudpass#34@ggmichalski_high" "@install_mars956.sql"
|
||||
echo 'yes' | sql "ADMIN/Cloudpass#34@ggmichalski_high" "@rollback_mars956.sql"
|
||||
7z a -pMojeSuperHaslo#123 -mhe=on M956_arch.7z MARS-956
|
||||
|
||||
Reference in New Issue
Block a user