Merge develop into main - added pJobClass parameter and MARS-826, MARS-835, MARS-828 updates
@@ -24,7 +24,8 @@ BEGIN
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
pBucketArea => 'ARCHIVE',
pFolderName => 'ARCHIVE/LM/LM_ADHOC_ADJUSTMENTS_HEADER',
pParallelDegree => 1
pParallelDegree => 1,
pJobClass => 'high'
);
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_ADHOC_ADJ_HEADER exported');
EXCEPTION
@@ -44,7 +45,8 @@ BEGIN
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
pBucketArea => 'ARCHIVE',
pFolderName => 'ARCHIVE/LM/LM_ADHOC_ADJUSTMENTS_ITEM',
pParallelDegree => 1
pParallelDegree => 1,
pJobClass => 'high'
);
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_ADHOC_ADJ_ITEM exported');
EXCEPTION
@@ -64,7 +66,8 @@ BEGIN
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
pBucketArea => 'ARCHIVE',
pFolderName => 'ARCHIVE/LM/LM_ADHOC_ADJUSTMENTS_ITEM_HEADER',
pParallelDegree => 1
pParallelDegree => 1,
pJobClass => 'high'
);
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_ADHOC_ADJ_ITEM_HEADER exported');
EXCEPTION
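
Each hunk in these archive export scripts follows the same pattern: the final pParallelDegree argument gains a trailing comma and a new pJobClass => 'high' argument is appended. For context, a hedged sketch of the full call being patched; the procedure name, schema, table name and exception handler sit outside the hunks and are assumptions:

-- Hedged sketch, not part of the diff: assumed shape of one patched export block
BEGIN
    CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(   -- assumed procedure; could be the CSV variant
        pSchemaName     => 'CT_ODS',                    -- assumed
        pTableName      => 'LEGACY_ADHOC_ADJ_HEADER',   -- assumed
        pKeyColumnName  => 'A_ETL_LOAD_SET_KEY_FK',
        pBucketArea     => 'ARCHIVE',
        pFolderName     => 'ARCHIVE/LM/LM_ADHOC_ADJUSTMENTS_HEADER',
        pParallelDegree => 1,
        pJobClass       => 'high'                       -- new argument introduced by this commit
    );
    DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_ADHOC_ADJ_HEADER exported');
EXCEPTION
    WHEN OTHERS THEN
        DBMS_OUTPUT.PUT_LINE('ERROR: ' || SQLERRM);     -- assumed handler
END;
/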

@@ -29,7 +29,8 @@ BEGIN
pKeyColumnName => 'A_ETL_LOAD_SET_KEY',
pBucketArea => 'ARCHIVE',
pFolderName => 'ARCHIVE/LM/LM_BALANCESHEET_HEADER',
pParallelDegree => 4
pParallelDegree => 4,
pJobClass => 'high'
);
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_BALANCESHEET_HEADER exported');
EXCEPTION
@@ -49,7 +50,8 @@ BEGIN
pKeyColumnName => 'A_ETL_LOAD_SET_KEY',
pBucketArea => 'ARCHIVE',
pFolderName => 'ARCHIVE/LM/LM_BALANCESHEET_ITEM',
pParallelDegree => 16
pParallelDegree => 16,
pJobClass => 'high'
);
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_BALANCESHEET_ITEM exported');
EXCEPTION

@@ -24,7 +24,8 @@ BEGIN
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
pBucketArea => 'ARCHIVE',
pFolderName => 'ARCHIVE/LM/LM_CSM_ADJUSTMENTS_HEADER',
pParallelDegree => 1
pParallelDegree => 1,
pJobClass => 'high'
);
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_CSM_ADJ_HEADER exported');
EXCEPTION
@@ -44,7 +45,8 @@ BEGIN
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
pBucketArea => 'ARCHIVE',
pFolderName => 'ARCHIVE/LM/LM_CSM_ADJUSTMENTS_ITEM',
pParallelDegree => 2
pParallelDegree => 2,
pJobClass => 'high'
);
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_CSM_ADJ_ITEM exported');
EXCEPTION
@@ -64,7 +66,8 @@ BEGIN
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
pBucketArea => 'ARCHIVE',
pFolderName => 'ARCHIVE/LM/LM_CSM_ADJUSTMENTS_ITEM_HEADER',
pParallelDegree => 2
pParallelDegree => 2,
pJobClass => 'high'
);
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_CSM_ADJ_ITEM_HEADER exported');
EXCEPTION

@@ -29,7 +29,8 @@ BEGIN
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
pBucketArea => 'ARCHIVE',
pFolderName => 'ARCHIVE/LM/LM_STANDING_FACILITIES',
pParallelDegree => 8
pParallelDegree => 8,
pJobClass => 'high'
);
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_STANDING_FACILITY exported');
EXCEPTION
@@ -49,7 +50,8 @@ BEGIN
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
pBucketArea => 'ARCHIVE',
pFolderName => 'ARCHIVE/LM/LM_STANDING_FACILITIES_HEADER',
pParallelDegree => 2
pParallelDegree => 2,
pJobClass => 'high'
);
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_STANDING_FACILITY_HEADER exported');
EXCEPTION

@@ -25,7 +25,8 @@ BEGIN
pKeyColumnName => 'A_ETL_LOAD_SET_KEY',
pBucketArea => 'ARCHIVE',
pFolderName => 'ARCHIVE/LM/LM_CURRENT_ACCOUNTS_HEADER',
pParallelDegree => 2
pParallelDegree => 2,
pJobClass => 'high'
);
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_MRR_IND_CURRENT_ACCOUNT_HEADER exported');
EXCEPTION
@@ -45,7 +46,8 @@ BEGIN
pKeyColumnName => 'A_ETL_LOAD_SET_KEY',
pBucketArea => 'ARCHIVE',
pFolderName => 'ARCHIVE/LM/LM_CURRENT_ACCOUNTS_ITEM',
pParallelDegree => 16
pParallelDegree => 16,
pJobClass => 'high'
);
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_MRR_IND_CURRENT_ACCOUNT_ITEM exported');
EXCEPTION

@@ -29,7 +29,8 @@ BEGIN
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
pBucketArea => 'ARCHIVE',
pFolderName => 'ARCHIVE/LM/LM_FORECAST_HEADER',
pParallelDegree => 4
pParallelDegree => 4,
pJobClass => 'high'
);
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_FORECAST_HEADER exported');
EXCEPTION
@@ -49,7 +50,8 @@ BEGIN
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
pBucketArea => 'ARCHIVE',
pFolderName => 'ARCHIVE/LM/LM_FORECAST_ITEM',
pParallelDegree => 16
pParallelDegree => 16,
pJobClass => 'high'
);
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_FORECAST_ITEM exported');
EXCEPTION

@@ -24,7 +24,8 @@ BEGIN
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
pBucketArea => 'ARCHIVE',
pFolderName => 'ARCHIVE/LM/LM_QRE_ADJUSTMENTS_HEADER',
pParallelDegree => 1
pParallelDegree => 1,
pJobClass => 'high'
);
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_QR_ADJ_HEADER exported');
EXCEPTION
@@ -44,7 +45,8 @@ BEGIN
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
pBucketArea => 'ARCHIVE',
pFolderName => 'ARCHIVE/LM/LM_QRE_ADJUSTMENTS_ITEM',
pParallelDegree => 4
pParallelDegree => 4,
pJobClass => 'high'
);
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_QR_ADJ_ITEM exported');
EXCEPTION
@@ -64,7 +66,8 @@ BEGIN
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
pBucketArea => 'ARCHIVE',
pFolderName => 'ARCHIVE/LM/LM_QRE_ADJUSTMENTS_ITEM_HEADER',
pParallelDegree => 2
pParallelDegree => 2,
pJobClass => 'high'
);
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_QR_ADJ_ITEM_HEADER exported');
EXCEPTION

@@ -24,7 +24,8 @@ BEGIN
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
pBucketArea => 'ARCHIVE',
pFolderName => 'ARCHIVE/LM/LM_TTS_HEADER',
pParallelDegree => 1
pParallelDegree => 1,
pJobClass => 'high'
);
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_TTS_HEADER exported');
EXCEPTION
@@ -44,7 +45,8 @@ BEGIN
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
pBucketArea => 'ARCHIVE',
pFolderName => 'ARCHIVE/LM/LM_TTS_ITEM',
pParallelDegree => 1
pParallelDegree => 1,
pJobClass => 'high'
);
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_TTS_ITEM exported');
EXCEPTION

@@ -1,9 +1,10 @@
-- ============================================================================
-- MARS-835-PREHOOK Installation Script 02: DATA_EXPORTER Package
-- ============================================================================
-- Purpose: Deploy updated DATA_EXPORTER package (SPEC + BODY) with parallel processing
-- Purpose: Deploy updated DATA_EXPORTER package (SPEC + BODY) v2.8.1
-- Schema: CT_MRDS
-- Object: PACKAGE DATA_EXPORTER

-- ============================================================================

SET SERVEROUTPUT ON SIZE UNLIMITED
@@ -13,8 +14,8 @@ PROMPT =========================================================================
PROMPT MARS-835-PREHOOK: Installing CT_MRDS.DATA_EXPORTER Package
PROMPT ============================================================================
PROMPT Package: CT_MRDS.DATA_EXPORTER
PROMPT Version: 2.2.0 -> 2.4.0 (MINOR)
PROMPT Change: Added parallel processing + Smart Column Mapping for CSV exports
PROMPT Version: 2.2.0 -> 2.8.1 (PATCH)
PROMPT Change: Fixed query in EXPORT_TABLE_DATA - removed A_LOAD_HISTORY join for single file
PROMPT ============================================================================

PROMPT

@@ -0,0 +1,70 @@
-- ====================================================================
-- MARS-835-PREHOOK: Update A_SOURCE_FILE_RECEIVED Table Structure
-- ====================================================================
-- Purpose:
-- 1. Rename column ARCH_FILE_NAME to ARCH_PATH
-- 2. Add new column PROCESS_NAME VARCHAR2(200)
-- Author: Grzegorz Michalski
-- Date: 2026-02-13
-- ====================================================================

SET SERVEROUTPUT ON SIZE UNLIMITED
SET TIMING ON

PROMPT ====================================================================
PROMPT MARS-835-PREHOOK: Updating A_SOURCE_FILE_RECEIVED table structure
PROMPT ====================================================================

-- Check if column ARCH_FILE_NAME exists
DECLARE
v_column_exists NUMBER;
v_process_name_exists NUMBER;
BEGIN
-- Check if ARCH_FILE_NAME exists
SELECT COUNT(*)
INTO v_column_exists
FROM dba_tab_columns
WHERE owner = 'CT_MRDS'
AND table_name = 'A_SOURCE_FILE_RECEIVED'
AND column_name = 'ARCH_FILE_NAME';

IF v_column_exists > 0 THEN
DBMS_OUTPUT.PUT_LINE('INFO: Renaming column ARCH_FILE_NAME to ARCH_PATH...');
EXECUTE IMMEDIATE 'ALTER TABLE CT_MRDS.A_SOURCE_FILE_RECEIVED RENAME COLUMN ARCH_FILE_NAME TO ARCH_PATH';
DBMS_OUTPUT.PUT_LINE('SUCCESS: Column renamed to ARCH_PATH');
ELSE
DBMS_OUTPUT.PUT_LINE('INFO: Column ARCH_FILE_NAME does not exist (already renamed or first install)');
END IF;

-- Check if PROCESS_NAME already exists
SELECT COUNT(*)
INTO v_process_name_exists
FROM dba_tab_columns
WHERE owner = 'CT_MRDS'
AND table_name = 'A_SOURCE_FILE_RECEIVED'
AND column_name = 'PROCESS_NAME';

IF v_process_name_exists = 0 THEN
DBMS_OUTPUT.PUT_LINE('INFO: Adding new column PROCESS_NAME...');
EXECUTE IMMEDIATE 'ALTER TABLE CT_MRDS.A_SOURCE_FILE_RECEIVED ADD (PROCESS_NAME VARCHAR2(200))';
DBMS_OUTPUT.PUT_LINE('SUCCESS: Column PROCESS_NAME added');

-- Add comment on new column
EXECUTE IMMEDIATE 'COMMENT ON COLUMN CT_MRDS.A_SOURCE_FILE_RECEIVED.PROCESS_NAME IS ''Name of the process that created this record''';
DBMS_OUTPUT.PUT_LINE('SUCCESS: Comment added to PROCESS_NAME column');
ELSE
DBMS_OUTPUT.PUT_LINE('INFO: Column PROCESS_NAME already exists');
END IF;

DBMS_OUTPUT.PUT_LINE('SUCCESS: A_SOURCE_FILE_RECEIVED table structure updated successfully');

EXCEPTION
WHEN OTHERS THEN
DBMS_OUTPUT.PUT_LINE('ERROR: Failed to update table structure: ' || SQLERRM);
RAISE;
END;
/

PROMPT ====================================================================
PROMPT A_SOURCE_FILE_RECEIVED Table Update Completed
PROMPT ====================================================================
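
The script above is idempotent: it checks dba_tab_columns before each change. A hedged verification query, not part of the commit, that can be run afterwards to confirm the result:

-- Hedged example (not part of the commit): confirm the rename and the new column
SELECT column_name, data_type, data_length
FROM dba_tab_columns
WHERE owner = 'CT_MRDS'
AND table_name = 'A_SOURCE_FILE_RECEIVED'
AND column_name IN ('ARCH_FILE_NAME', 'ARCH_PATH', 'PROCESS_NAME')
ORDER BY column_name;
-- Expected after a successful run: ARCH_PATH and PROCESS_NAME are listed, ARCH_FILE_NAME is not.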

@@ -0,0 +1,65 @@
-- ====================================================================
-- MARS-835-PREHOOK ROLLBACK: Revert A_SOURCE_FILE_RECEIVED Table Structure
-- ====================================================================
-- Purpose:
-- 1. Rename column ARCH_PATH back to ARCH_FILE_NAME
-- 2. Remove column PROCESS_NAME
-- Author: Grzegorz Michalski
-- Date: 2026-02-13
-- ====================================================================

SET SERVEROUTPUT ON SIZE UNLIMITED
SET TIMING ON

PROMPT ====================================================================
PROMPT MARS-835-PREHOOK ROLLBACK: Reverting A_SOURCE_FILE_RECEIVED table
PROMPT ====================================================================

DECLARE
v_column_exists NUMBER;
v_process_name_exists NUMBER;
BEGIN
-- Check if ARCH_PATH exists (needs to be renamed back)
SELECT COUNT(*)
INTO v_column_exists
FROM dba_tab_columns
WHERE owner = 'CT_MRDS'
AND table_name = 'A_SOURCE_FILE_RECEIVED'
AND column_name = 'ARCH_PATH';

IF v_column_exists > 0 THEN
DBMS_OUTPUT.PUT_LINE('INFO: Renaming column ARCH_PATH back to ARCH_FILE_NAME...');
EXECUTE IMMEDIATE 'ALTER TABLE CT_MRDS.A_SOURCE_FILE_RECEIVED RENAME COLUMN ARCH_PATH TO ARCH_FILE_NAME';
DBMS_OUTPUT.PUT_LINE('SUCCESS: Column renamed back to ARCH_FILE_NAME');
ELSE
DBMS_OUTPUT.PUT_LINE('INFO: Column ARCH_PATH does not exist (already rolled back)');
END IF;

-- Check if PROCESS_NAME exists (needs to be dropped)
SELECT COUNT(*)
INTO v_process_name_exists
FROM dba_tab_columns
WHERE owner = 'CT_MRDS'
AND table_name = 'A_SOURCE_FILE_RECEIVED'
AND column_name = 'PROCESS_NAME';

IF v_process_name_exists > 0 THEN
DBMS_OUTPUT.PUT_LINE('INFO: Dropping column PROCESS_NAME...');
EXECUTE IMMEDIATE 'ALTER TABLE CT_MRDS.A_SOURCE_FILE_RECEIVED DROP COLUMN PROCESS_NAME';
DBMS_OUTPUT.PUT_LINE('SUCCESS: Column PROCESS_NAME dropped');
ELSE
DBMS_OUTPUT.PUT_LINE('INFO: Column PROCESS_NAME does not exist (already rolled back)');
END IF;

DBMS_OUTPUT.PUT_LINE('SUCCESS: A_SOURCE_FILE_RECEIVED table structure rollback completed');

EXCEPTION
WHEN OTHERS THEN
DBMS_OUTPUT.PUT_LINE('ERROR: Failed to rollback table structure: ' || SQLERRM);
RAISE;
END;
/

PROMPT ====================================================================
PROMPT A_SOURCE_FILE_RECEIVED Table Rollback Completed
PROMPT ====================================================================

@@ -31,6 +31,7 @@ PROMPT =========================================================================
PROMPT
PROMPT This script will:
PROMPT - Create A_PARALLEL_EXPORT_CHUNKS table with unique timestamp task names
PROMPT - Update A_SOURCE_FILE_RECEIVED table (rename ARCH_FILE_NAME to ARCH_PATH, add PROCESS_NAME column)
PROMPT - Update ENV_MANAGER to v3.2.0 (add parallel execution error codes)
PROMPT - Update DATA_EXPORTER to v2.4.0 (DBMS_PARALLEL_EXECUTE + Smart Column Mapping)
PROMPT - Add pParallelDegree parameter (1-16 threads) to EXPORT_*_BY_DATE procedures
@@ -59,25 +60,31 @@ PROMPT =========================================================================

PROMPT
PROMPT =========================================================================
PROMPT Step 2: Deploy ENV_MANAGER Package
PROMPT Step 2: Update A_SOURCE_FILE_RECEIVED Table Structure
PROMPT =========================================================================
@@03_MARS_835_PREHOOK_update_SOURCE_FILE_RECEIVED_table.sql

PROMPT
PROMPT =========================================================================
PROMPT Step 3: Deploy ENV_MANAGER Package
PROMPT =========================================================================
@@01_MARS_835_PREHOOK_install_ENV_MANAGER.sql

PROMPT
PROMPT =========================================================================
PROMPT Step 3: Deploy DATA_EXPORTER Package
PROMPT Step 4: Deploy DATA_EXPORTER Package
PROMPT =========================================================================
@@02_MARS_835_PREHOOK_install_DATA_EXPORTER.sql

PROMPT
PROMPT =========================================================================
PROMPT Step 4: Track Package Versions
PROMPT Step 5: Track Package Versions
PROMPT =========================================================================
@@track_package_versions.sql

PROMPT
PROMPT =========================================================================
PROMPT Step 5: Verify Package Versions
PROMPT Step 6: Verify Package Versions
PROMPT =========================================================================
@@verify_packages_version.sql

@@ -43,6 +43,7 @@ CREATE TABLE CT_MRDS.A_PARALLEL_EXPORT_CHUNKS (
FILE_BASE_NAME VARCHAR2(1000),
TEMPLATE_TABLE_NAME VARCHAR2(200),
MAX_FILE_SIZE NUMBER DEFAULT 104857600 NOT NULL,
JOB_CLASS VARCHAR2(128),
STATUS VARCHAR2(30) DEFAULT 'PENDING' NOT NULL,
ERROR_MESSAGE VARCHAR2(4000),
EXPORT_TIMESTAMP TIMESTAMP,
@@ -69,6 +70,7 @@ COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.FORMAT_TYPE IS 'Export format
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.FILE_BASE_NAME IS 'Base filename for CSV exports (NULL for Parquet)';
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.TEMPLATE_TABLE_NAME IS 'Template table name for per-column date format configuration (e.g., CT_ET_TEMPLATES.TABLE_NAME)';
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.MAX_FILE_SIZE IS 'Maximum file size in bytes for CSV exports only (e.g., 104857600 = 100MB, 1073741824 = 1GB) - default 100MB (104857600). NOTE: Not applicable for PARQUET format (Oracle limitation)';
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.JOB_CLASS IS 'Oracle Scheduler job class name for resource management (e.g., ''high'', ''DEFAULT_JOB_CLASS'') - NULL uses default scheduler priority';
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.STATUS IS 'Chunk processing status: PENDING (not started), PROCESSING (in progress), COMPLETED (success), FAILED (error) - allows retry of failed partitions only';
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.ERROR_MESSAGE IS 'Error message if chunk processing failed (STATUS = FAILED)';
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.EXPORT_TIMESTAMP IS 'Timestamp when chunk export was completed (STATUS = COMPLETED)';
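
The JOB_CLASS comment above refers to an Oracle Scheduler job class named 'high'. That class is not created anywhere in this commit, so it must already exist or be set up separately; a hedged sketch of what such a setup could look like, where the resource consumer group name is an assumption:

-- Hedged sketch (not part of the commit): create the scheduler job class used by pJobClass => 'high'
BEGIN
    DBMS_SCHEDULER.CREATE_JOB_CLASS(
        job_class_name          => 'high',
        resource_consumer_group => 'HIGH_PRIORITY_GROUP',   -- assumed consumer group name
        comments                => 'High-priority job class for parallel export chunk jobs'
    );
END;
/
-- The schema running the exports also needs EXECUTE on the class, e.g.:
-- GRANT EXECUTE ON SYS."high" TO CT_MRDS;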

@@ -0,0 +1,30 @@
-- ====================================================================
-- A_SOURCE_FILE_RECEIVED Table
-- ====================================================================
-- Purpose: Track received files and their processing status
-- ====================================================================

CREATE TABLE CT_MRDS.A_SOURCE_FILE_RECEIVED (
A_SOURCE_FILE_RECEIVED_KEY NUMBER(38,0) NOT NULL ENABLE,
A_SOURCE_FILE_CONFIG_KEY NUMBER(38,0) NOT NULL ENABLE,
SOURCE_FILE_NAME VARCHAR2(1000) NOT NULL,
CHECKSUM VARCHAR2(128),
CREATED TIMESTAMP(6) WITH TIME ZONE,
BYTES NUMBER,
RECEPTION_DATE DATE NOT NULL,
PROCESSING_STATUS VARCHAR2(200),
EXTERNAL_TABLE_NAME VARCHAR2(200),
PARTITION_YEAR VARCHAR2(4),
PARTITION_MONTH VARCHAR2(2),
ARCH_PATH VARCHAR2(1000),
PROCESS_NAME VARCHAR2(200),
CONSTRAINT A_SOURCE_FILE_RECEIVED_PK PRIMARY KEY (A_SOURCE_FILE_RECEIVED_KEY),
CONSTRAINT ASFR_A_SOURCE_FILE_CONFIG_KEY_FK FOREIGN KEY(A_SOURCE_FILE_CONFIG_KEY) REFERENCES CT_MRDS.A_SOURCE_FILE_CONFIG(A_SOURCE_FILE_CONFIG_KEY),
CONSTRAINT A_SOURCE_FILE_RECEIVED_CHK CHECK (PROCESSING_STATUS IN ('RECEIVED', 'VALIDATED', 'READY_FOR_INGESTION', 'INGESTED', 'ARCHIVED'))
) TABLESPACE "DATA";

-- Unique index for file identification (workaround for TIMESTAMP WITH TIMEZONE constraint limitation)
CREATE UNIQUE INDEX CT_MRDS.A_SOURCE_FILE_RECEIVED_UK1
ON CT_MRDS.A_SOURCE_FILE_RECEIVED(CHECKSUM, CREATED, BYTES);

GRANT SELECT, INSERT, UPDATE, DELETE ON CT_MRDS.A_SOURCE_FILE_RECEIVED TO MRDS_LOADER_ROLE;
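
The new ARCH_PATH and PROCESS_NAME columns are what DATA_EXPORTER fills in when pRegisterExport => TRUE. A hedged query sketch, not part of the commit, for reviewing what a given process has registered:

-- Hedged example (not part of the commit): list files registered by the exporter
SELECT SOURCE_FILE_NAME, BYTES, PROCESSING_STATUS, PROCESS_NAME, RECEPTION_DATE
FROM CT_MRDS.A_SOURCE_FILE_RECEIVED
WHERE PROCESS_NAME = 'DATA_EXPORTER'   -- default value of the new pProcessName parameter
ORDER BY RECEPTION_DATE DESC;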

@@ -501,6 +501,7 @@ AS
vFormat VARCHAR2(20);
vFileBaseName VARCHAR2(1000);
vMaxFileSize NUMBER;
vJobClass VARCHAR2(128);
vParameters VARCHAR2(4000);
BEGIN
-- Retrieve chunk context from global temporary table
@@ -518,7 +519,8 @@ AS
CREDENTIAL_NAME,
FORMAT_TYPE,
FILE_BASE_NAME,
MAX_FILE_SIZE
MAX_FILE_SIZE,
JOB_CLASS
INTO
vYear,
vMonth,
@@ -533,7 +535,8 @@ AS
vCredentialName,
vFormat,
vFileBaseName,
vMaxFileSize
vMaxFileSize,
vJobClass
FROM CT_MRDS.A_PARALLEL_EXPORT_CHUNKS
WHERE CHUNK_ID = pStartId;

@@ -602,20 +605,17 @@ AS
pKeyColumnName IN VARCHAR2,
pBucketArea IN VARCHAR2,
pFolderName IN VARCHAR2,
pFileName IN VARCHAR2 default NULL,
pTemplateTableName IN VARCHAR2 default NULL,
pMaxFileSize IN NUMBER default 104857600,
pRegisterExport IN BOOLEAN default FALSE,
pProcessName IN VARCHAR2 default 'DATA_EXPORTER',
pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
)
IS
-- Type definition for key values
TYPE key_value_tab IS TABLE OF VARCHAR2(4000);
vKeyValues key_value_tab;
vCount INTEGER;
vSql VARCHAR2(4000);
vKeyValue VARCHAR2(4000);
vQuery VARCHAR2(32767);
vUri VARCHAR2(4000);
vDataType VARCHAR2(30);
vTableName VARCHAR2(128);
vSchemaName VARCHAR2(128);
vKeyColumnName VARCHAR2(128);
@@ -638,8 +638,11 @@ AS
,'pKeyColumnName => '''||nvl(pKeyColumnName, 'NULL')||''''
,'pBucketArea => '''||nvl(pBucketArea, 'NULL')||''''
,'pFolderName => '''||nvl(pFolderName, 'NULL')||''''
,'pFileName => '''||nvl(pFileName, 'NULL')||''''
,'pTemplateTableName => '''||nvl(pTemplateTableName, 'NULL')||''''
,'pMaxFileSize => '''||nvl(TO_CHAR(pMaxFileSize), 'NULL')||''''
,'pRegisterExport => '''||CASE WHEN pRegisterExport THEN 'TRUE' ELSE 'FALSE' END||''''
,'pProcessName => '''||nvl(pProcessName, 'NULL')||''''
,'pCredentialName => '''||nvl(pCredentialName, 'NULL')||''''
));
ENV_MANAGER.LOG_PROCESS_EVENT('Start','INFO', vParameters);
@@ -671,16 +674,8 @@ AS

IF vCount = 0 THEN
RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, ENV_MANAGER.MSG_COLUMN_NOT_EXISTS);

END IF;

-- Get the data type of the key column
SELECT data_type INTO vDataType
FROM all_tab_columns
WHERE table_name = vTableName
AND column_name = vKeyColumnName
AND owner = vSchemaName;

-- Validate template table if provided
IF pTemplateTableName IS NOT NULL THEN
DECLARE
@@ -760,183 +755,174 @@ AS
ENV_MANAGER.LOG_PROCESS_EVENT('File registration enabled with config key: ' || vConfigKey, 'INFO', vParameters);
END IF;

-- Fetch unique key values from A_LOAD_HISTORY
vSql := 'SELECT DISTINCT L.A_ETL_LOAD_SET_KEY' ||
' FROM ' || vTableName || ' T, CT_ODS.A_LOAD_HISTORY L' ||
' WHERE T.' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = L.A_ETL_LOAD_SET_KEY';
-- Construct single query for entire table (no join with A_LOAD_HISTORY - ensures single file output)
vQuery := 'SELECT ' || vProcessedColumnList ||
' FROM ' || vTableName || ' T';

-- Construct the URI for the file in OCI Object Storage
vUri := vBucketUri ||
CASE WHEN pFolderName IS NOT NULL THEN pFolderName || '/' ELSE '' END ||
NVL(pFileName, UPPER(vTableName) || '.csv');

ENV_MANAGER.LOG_PROCESS_EVENT('Exporting to single file: ' || vUri, 'INFO', vParameters);
ENV_MANAGER.LOG_PROCESS_EVENT('Export query: ' || vQuery, 'DEBUG', vParameters);
ENV_MANAGER.LOG_PROCESS_EVENT('Max file size: ' || pMaxFileSize || ' bytes (' || ROUND(pMaxFileSize/1048576, 2) || ' MB)', 'DEBUG', vParameters);

-- Use DBMS_CLOUD package to export data to the URI
-- Oracle maxfilesize: min 10MB (10485760), max 1GB (1073741824), default 100MB (104857600)
DBMS_CLOUD.EXPORT_DATA(
credential_name => pCredentialName,
file_uri_list => vUri,
query => vQuery,
format => json_object(
'type' VALUE 'CSV',
'header' VALUE true,
'quote' VALUE CHR(34),
'delimiter' VALUE ',',
'escape' VALUE true,
'recorddelimiter' VALUE CHR(13)||CHR(10), -- CRLF for Windows
'maxfilesize' VALUE pMaxFileSize -- Dynamic maxfilesize in bytes
)
);

ENV_MANAGER.LOG_PROCESS_EVENT('Executing key values query: ' || vSql, 'DEBUG', vParameters);
EXECUTE IMMEDIATE vSql BULK COLLECT INTO vKeyValues;
ENV_MANAGER.LOG_PROCESS_EVENT('Found ' || vKeyValues.COUNT || ' unique key values to process', 'DEBUG', vParameters);

-- Loop over each unique key value
FOR i IN 1 .. vKeyValues.COUNT LOOP
vKeyValue := vKeyValues(i);

-- Construct the query to extract data for the current key value with A_WORKFLOW_HISTORY_KEY mapping
IF vDataType IN ('VARCHAR2', 'CHAR', 'NCHAR', 'NVARCHAR2') THEN
vQuery := 'SELECT ' || vProcessedColumnList ||
' FROM ' || vTableName || ' T, CT_ODS.A_LOAD_HISTORY L' ||
' WHERE T.' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = L.A_ETL_LOAD_SET_KEY' ||
' AND L.A_ETL_LOAD_SET_KEY = ' || CHR(39) || vKeyValue || CHR(39);
ELSIF vDataType IN ('NUMBER', 'FLOAT', 'BINARY_FLOAT', 'BINARY_DOUBLE') THEN
vQuery := 'SELECT ' || vProcessedColumnList ||
' FROM ' || vTableName || ' T, CT_ODS.A_LOAD_HISTORY L' ||
' WHERE T.' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = L.A_ETL_LOAD_SET_KEY' ||
' AND L.A_ETL_LOAD_SET_KEY = ' || vKeyValue;
ELSIF vDataType LIKE 'TIMESTAMP%' OR vDataType = 'DATE' THEN
vQuery := 'SELECT ' || vProcessedColumnList ||
' FROM ' || vTableName || ' T, CT_ODS.A_LOAD_HISTORY L' ||
' WHERE T.' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = L.A_ETL_LOAD_SET_KEY' ||
' AND L.A_ETL_LOAD_SET_KEY = TO_TIMESTAMP(' || CHR(39) || vKeyValue || CHR(39) ||', ''YYYY-MM-DD HH24:MI:SS.FF'')';
ELSE
RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_UNSUPPORTED_DATA_TYPE, ENV_MANAGER.MSG_UNSUPPORTED_DATA_TYPE);
END IF;

-- Construct the URI for the file in OCI Object Storage
vUri := vBucketUri ||
CASE WHEN pFolderName IS NOT NULL THEN pFolderName || '/' ELSE '' END ||
sanitizeFilename(vKeyValue) || '.csv';

ENV_MANAGER.LOG_PROCESS_EVENT('Processing key value: ' || vKeyValue || ' (' || (i) || '/' || vKeyValues.COUNT || ')', 'DEBUG', vParameters);
ENV_MANAGER.LOG_PROCESS_EVENT('Export query: ' || vQuery, 'DEBUG', vParameters);
ENV_MANAGER.LOG_PROCESS_EVENT('Export URI: ' || vUri, 'DEBUG', vParameters);

-- Use DBMS_CLOUD package to export data to the URI
DBMS_CLOUD.EXPORT_DATA(
credential_name => pCredentialName,
file_uri_list => vUri,
query => vQuery,
format => json_object('type' VALUE 'CSV', 'header' VALUE true)
);

-- Register exported file to A_SOURCE_FILE_RECEIVED if requested
IF pRegisterExport THEN
DECLARE
vChecksum VARCHAR2(128);
vCreated TIMESTAMP WITH TIME ZONE;
vBytes NUMBER;
vActualFileName VARCHAR2(1000); -- Actual filename with Oracle suffix
vSanitizedFileName VARCHAR2(1000);
vFileName VARCHAR2(1000);
vRetryCount NUMBER := 0;
vMaxRetries NUMBER := 1; -- One retry after initial attempt
vRetryDelay NUMBER := 2; -- 2 seconds delay
BEGIN
-- Extract filename from URI (after last '/')
vFileName := SUBSTR(vUri, INSTR(vUri, '/', -1) + 1);

-- Sanitize filename first (PL/SQL function cannot be used directly in SQL)
vSanitizedFileName := sanitizeFilename(vFileName);

-- Remove .csv extension for LIKE pattern matching (Oracle adds suffixes BEFORE .csv)
-- Example: keyvalue.csv becomes keyvalue_1_20260211T102621591769Z.csv
vSanitizedFileName := REGEXP_REPLACE(vSanitizedFileName, '\.csv$', '', 1, 0, 'i');

-- Try to get file metadata with retry logic
<<metadata_retry_loop>>
LOOP
BEGIN
SELECT object_name, checksum, created, bytes
INTO vActualFileName, vChecksum, vCreated, vBytes
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
credential_name => pCredentialName,
location_uri => vBucketUri
))
WHERE object_name LIKE CASE WHEN pFolderName IS NOT NULL THEN pFolderName || '/' ELSE '' END || vSanitizedFileName || '%'
ORDER BY created DESC, bytes DESC
FETCH FIRST 1 ROW ONLY;

-- Extract filename only from full path (remove bucket folder prefix)
vActualFileName := SUBSTR(vActualFileName, INSTR(vActualFileName, '/', -1) + 1);

-- Success - exit retry loop
EXIT metadata_retry_loop;

EXCEPTION
WHEN NO_DATA_FOUND THEN
vRetryCount := vRetryCount + 1;

IF vRetryCount <= vMaxRetries THEN
-- Log retry attempt
ENV_MANAGER.LOG_PROCESS_EVENT('File not found in bucket (attempt ' || vRetryCount || '/' || (vMaxRetries + 1) || '), retrying after ' || vRetryDelay || ' seconds: ' || vFileName, 'DEBUG', vParameters);

-- Wait before retry using DBMS_SESSION.SLEEP (alternative to DBMS_LOCK)
DBMS_SESSION.SLEEP(vRetryDelay);
ELSE
-- Max retries exceeded - re-raise exception
RAISE;
END IF;
END;
END LOOP metadata_retry_loop;

-- Create A_SOURCE_FILE_RECEIVED record for this export with metadata
vSourceFileReceivedKey := CT_MRDS.A_SOURCE_FILE_RECEIVED_KEY_SEQ.NEXTVAL;
INSERT INTO CT_MRDS.A_SOURCE_FILE_RECEIVED (
A_SOURCE_FILE_RECEIVED_KEY,
A_SOURCE_FILE_CONFIG_KEY,
SOURCE_FILE_NAME,
CHECKSUM,
CREATED,
BYTES,
RECEPTION_DATE,
PROCESSING_STATUS,
PARTITION_YEAR,
PARTITION_MONTH,
ARCH_FILE_NAME
) VALUES (
vSourceFileReceivedKey,
NVL(vConfigKey, -1), -- Use config key if found, otherwise -1
vActualFileName, -- Use actual filename with Oracle suffix
vChecksum,
vCreated,
vBytes,
SYSDATE,
'INGESTED',
NULL, -- PARTITION_YEAR not used for single-file exports
NULL, -- PARTITION_MONTH not used for single-file exports
NULL -- ARCH_FILE_NAME not used for single-file exports
);

ENV_MANAGER.LOG_PROCESS_EVENT('Registered file: FileReceivedKey=' || vSourceFileReceivedKey || ', File=' || vActualFileName || ', Size=' || vBytes || ' bytes', 'DEBUG', vParameters);
EXCEPTION
WHEN NO_DATA_FOUND THEN
-- File not found after retries - log warning and continue without metadata
ENV_MANAGER.LOG_PROCESS_EVENT('WARNING: File not found in bucket after ' || (vMaxRetries + 1) || ' attempts: ' || vFileName, 'WARNING', vParameters);

-- Sanitize filename for fallback INSERT (function cannot be used in SQL)
vSanitizedFileName := sanitizeFilename(vFileName);

-- Insert without metadata using theoretical filename
vSourceFileReceivedKey := CT_MRDS.A_SOURCE_FILE_RECEIVED_KEY_SEQ.NEXTVAL;
INSERT INTO CT_MRDS.A_SOURCE_FILE_RECEIVED (
A_SOURCE_FILE_RECEIVED_KEY,
A_SOURCE_FILE_CONFIG_KEY,
SOURCE_FILE_NAME,
RECEPTION_DATE,
PROCESSING_STATUS,
PARTITION_YEAR,
PARTITION_MONTH,
ARCH_FILE_NAME
) VALUES (
vSourceFileReceivedKey,
NVL(vConfigKey, -1), -- Use config key if found, otherwise -1
vSanitizedFileName, -- Use pre-calculated sanitized filename
SYSDATE,
'INGESTED',
NULL, -- PARTITION_YEAR not used for single-file exports
NULL, -- PARTITION_MONTH not used for single-file exports
NULL -- ARCH_FILE_NAME not used for single-file exports
);

ENV_MANAGER.LOG_PROCESS_EVENT('Registered file without metadata: FileReceivedKey=' || vSourceFileReceivedKey || ', File=' || vSanitizedFileName, 'DEBUG', vParameters);
END;
END IF;
END LOOP;

-- Log summary of file registration if enabled
-- Register exported file to A_SOURCE_FILE_RECEIVED if requested
IF pRegisterExport THEN
ENV_MANAGER.LOG_PROCESS_EVENT('Registered ' || vKeyValues.COUNT || ' exported files to A_SOURCE_FILE_RECEIVED with config key: ' || vConfigKey, 'INFO', vParameters);
DECLARE
vActualFileName VARCHAR2(1000); -- Actual filename with Oracle suffix
vSanitizedFileName VARCHAR2(1000);
vFileName VARCHAR2(1000);
vRetryCount NUMBER := 0;
vMaxRetries NUMBER := 1; -- One retry after initial attempt
vRetryDelay NUMBER := 2; -- 2 seconds delay
vFilesFound NUMBER := 0;
vTotalBytes NUMBER := 0;
BEGIN
-- Extract filename from URI (after last '/')
vFileName := SUBSTR(vUri, INSTR(vUri, '/', -1) + 1);

-- Sanitize filename first (PL/SQL function cannot be used directly in SQL)
vSanitizedFileName := sanitizeFilename(vFileName);

-- Remove .csv extension for LIKE pattern matching (Oracle adds suffixes BEFORE .csv)
-- Example: tablename.csv becomes tablename_1_20260211T102621591769Z.csv
vSanitizedFileName := REGEXP_REPLACE(vSanitizedFileName, '\.csv$', '', 1, 0, 'i');

-- Try to get ALL exported files with retry logic
-- Oracle DBMS_CLOUD.EXPORT_DATA can create MULTIPLE files due to:
-- 1. maxfilesize parameter (splits files larger than limit)
-- 2. Automatic parallel processing (especially on large production instances)
-- We must register ALL files, not just the first one
<<metadata_retry_loop>>
LOOP
BEGIN
-- Register ALL files matching the pattern (cursor loop)
FOR rec IN (
SELECT object_name, checksum, created, bytes
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
credential_name => pCredentialName,
location_uri => vBucketUri
))
WHERE object_name LIKE CASE WHEN pFolderName IS NOT NULL THEN pFolderName || '/' ELSE '' END || vSanitizedFileName || '%'
ORDER BY created DESC, bytes DESC
) LOOP
-- Extract filename only from full path (remove bucket folder prefix)
vActualFileName := SUBSTR(rec.object_name, INSTR(rec.object_name, '/', -1) + 1);

-- Create A_SOURCE_FILE_RECEIVED record for EACH exported file
vSourceFileReceivedKey := CT_MRDS.A_SOURCE_FILE_RECEIVED_KEY_SEQ.NEXTVAL;
INSERT INTO CT_MRDS.A_SOURCE_FILE_RECEIVED (
A_SOURCE_FILE_RECEIVED_KEY,
A_SOURCE_FILE_CONFIG_KEY,
SOURCE_FILE_NAME,
CHECKSUM,
CREATED,
BYTES,
RECEPTION_DATE,
PROCESSING_STATUS,
PARTITION_YEAR,
PARTITION_MONTH,
ARCH_PATH,
PROCESS_NAME
) VALUES (
vSourceFileReceivedKey,
NVL(vConfigKey, -1), -- Use config key if found, otherwise -1
vActualFileName, -- Use actual filename with Oracle suffix
rec.checksum,
rec.created,
rec.bytes,
SYSDATE,
'INGESTED',
NULL, -- PARTITION_YEAR not used for single-file exports
NULL, -- PARTITION_MONTH not used for single-file exports
NULL, -- ARCH_PATH not used for single-file exports
pProcessName -- Process name from parameter
);

vFilesFound := vFilesFound + 1;
vTotalBytes := vTotalBytes + rec.bytes;

ENV_MANAGER.LOG_PROCESS_EVENT('Registered file ' || vFilesFound || ': FileReceivedKey=' || vSourceFileReceivedKey || ', File=' || vActualFileName || ', Size=' || rec.bytes || ' bytes', 'INFO', vParameters);
END LOOP;

-- Check if any files were found
IF vFilesFound = 0 THEN
RAISE NO_DATA_FOUND;
END IF;

-- Success - exit retry loop
ENV_MANAGER.LOG_PROCESS_EVENT('Total registered: ' || vFilesFound || ' file(s), Total size: ' || vTotalBytes || ' bytes (' || ROUND(vTotalBytes/1048576, 2) || ' MB)', 'INFO', vParameters);
EXIT metadata_retry_loop;

EXCEPTION
WHEN NO_DATA_FOUND THEN
vRetryCount := vRetryCount + 1;

IF vRetryCount <= vMaxRetries THEN
-- Log retry attempt
ENV_MANAGER.LOG_PROCESS_EVENT('File(s) not found in bucket (attempt ' || vRetryCount || '/' || (vMaxRetries + 1) || '), retrying after ' || vRetryDelay || ' seconds: ' || vFileName, 'DEBUG', vParameters);

-- Wait before retry using DBMS_SESSION.SLEEP (alternative to DBMS_LOCK)
DBMS_SESSION.SLEEP(vRetryDelay);
ELSE
-- Max retries exceeded - re-raise exception
RAISE;
END IF;
END;
END LOOP metadata_retry_loop;
EXCEPTION
WHEN NO_DATA_FOUND THEN
-- File not found after retries - log warning and continue without metadata
ENV_MANAGER.LOG_PROCESS_EVENT('WARNING: File not found in bucket after ' || (vMaxRetries + 1) || ' attempts: ' || vFileName, 'WARNING', vParameters);

-- Sanitize filename for fallback INSERT (function cannot be used in SQL)
vSanitizedFileName := sanitizeFilename(vFileName);

-- Insert without metadata using theoretical filename
vSourceFileReceivedKey := CT_MRDS.A_SOURCE_FILE_RECEIVED_KEY_SEQ.NEXTVAL;
INSERT INTO CT_MRDS.A_SOURCE_FILE_RECEIVED (
A_SOURCE_FILE_RECEIVED_KEY,
A_SOURCE_FILE_CONFIG_KEY,
SOURCE_FILE_NAME,
RECEPTION_DATE,
PROCESSING_STATUS,
PARTITION_YEAR,
PARTITION_MONTH,
ARCH_PATH,
PROCESS_NAME
) VALUES (
vSourceFileReceivedKey,
NVL(vConfigKey, -1), -- Use config key if found, otherwise -1
vSanitizedFileName, -- Use pre-calculated sanitized filename
SYSDATE,
'INGESTED',
NULL, -- PARTITION_YEAR not used for single-file exports
NULL, -- PARTITION_MONTH not used for single-file exports
NULL, -- ARCH_PATH not used for single-file exports
pProcessName -- Process name from parameter
);

ENV_MANAGER.LOG_PROCESS_EVENT('Registered file without metadata: FileReceivedKey=' || vSourceFileReceivedKey || ', File=' || vSanitizedFileName, 'INFO', vParameters);
END;
END IF;

ENV_MANAGER.LOG_PROCESS_EVENT('End','INFO',vParameters);
@@ -949,10 +935,6 @@ AS
vgMsgTmp := ENV_MANAGER.MSG_COLUMN_NOT_EXISTS || ' (TableName.ColumnName): ' || vTableName||'.'||vKeyColumnName||CASE WHEN vCurrentCol IS NOT NULL THEN '.'||vCurrentCol||' in column list' ELSE '' END;
ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, vgMsgTmp);
WHEN ENV_MANAGER.ERR_UNSUPPORTED_DATA_TYPE THEN
vgMsgTmp := ENV_MANAGER.MSG_UNSUPPORTED_DATA_TYPE || ' vDataType: '||vDataType;
ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_UNSUPPORTED_DATA_TYPE, vgMsgTmp);
WHEN OTHERS THEN
-- Log complete error details including full stack trace and backtrace
ENV_MANAGER.LOG_PROCESS_ERROR('Export failed: ' || SQLERRM, vParameters, 'DATA_EXPORTER');
@@ -974,6 +956,7 @@ AS
pMaxDate IN DATE default SYSDATE,
pParallelDegree IN NUMBER default 1,
pTemplateTableName IN VARCHAR2 default NULL,
pJobClass IN VARCHAR2 default NULL,
pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
)
IS
@@ -997,6 +980,7 @@ AS
,'pMaxDate => '''||nvl(TO_CHAR(pMaxDate, 'YYYY-MM-DD HH24:MI:SS'), 'NULL')||''''
,'pParallelDegree => '''||nvl(TO_CHAR(pParallelDegree), 'NULL')||''''
,'pTemplateTableName => '''||nvl(pTemplateTableName, 'NULL')||''''
,'pJobClass => '''||nvl(pJobClass, 'NULL')||''''
,'pCredentialName => '''||nvl(pCredentialName, 'NULL')||''''
));
ENV_MANAGER.LOG_PROCESS_EVENT('Start','INFO', vParameters);
@@ -1068,22 +1052,7 @@ AS
vChunkId NUMBER;
BEGIN
ENV_MANAGER.LOG_PROCESS_EVENT('Using parallel processing with ' || pParallelDegree || ' threads', 'INFO', vParameters);

-- Clean up old completed chunks (>24 hours) to prevent table bloat
-- CRITICAL: Do NOT delete chunks from other active sessions (same-day tasks)
-- This prevents race conditions when multiple exports run simultaneously
DELETE FROM CT_MRDS.A_PARALLEL_EXPORT_CHUNKS
WHERE STATUS = 'COMPLETED'
AND CREATED_DATE < SYSTIMESTAMP - INTERVAL '1' DAY;
COMMIT;

ENV_MANAGER.LOG_PROCESS_EVENT('Cleared old COMPLETED chunks (>24h). Active session chunks preserved.', 'DEBUG', vParameters);
-- This prevents re-exporting successfully completed partitions
DELETE FROM CT_MRDS.A_PARALLEL_EXPORT_CHUNKS WHERE STATUS = 'COMPLETED';
COMMIT;

ENV_MANAGER.LOG_PROCESS_EVENT('Cleared COMPLETED chunks. FAILED chunks retained for retry.', 'DEBUG', vParameters);

-- Populate chunks table (insert new chunks, preserve FAILED chunks for retry)
FOR i IN 1 .. vPartitions.COUNT LOOP
MERGE INTO CT_MRDS.A_PARALLEL_EXPORT_CHUNKS t
@@ -1092,10 +1061,10 @@ AS
WHEN NOT MATCHED THEN
INSERT (CHUNK_ID, TASK_NAME, YEAR_VALUE, MONTH_VALUE, SCHEMA_NAME, TABLE_NAME, KEY_COLUMN_NAME,
BUCKET_URI, FOLDER_NAME, PROCESSED_COLUMNS, MIN_DATE, MAX_DATE,
CREDENTIAL_NAME, FORMAT_TYPE, FILE_BASE_NAME, TEMPLATE_TABLE_NAME, MAX_FILE_SIZE, STATUS)
CREDENTIAL_NAME, FORMAT_TYPE, FILE_BASE_NAME, TEMPLATE_TABLE_NAME, MAX_FILE_SIZE, JOB_CLASS, STATUS)
VALUES (i, vTaskName, vPartitions(i).year, vPartitions(i).month, vSchemaName, vTableName, vKeyColumnName,
vBucketUri, pFolderName, vProcessedColumnList, pMinDate, pMaxDate,
pCredentialName, 'PARQUET', NULL, pTemplateTableName, 104857600, 'PENDING')
pCredentialName, 'PARQUET', NULL, pTemplateTableName, 104857600, pJobClass, 'PENDING')
WHEN MATCHED THEN
UPDATE SET TASK_NAME = vTaskName,
STATUS = CASE WHEN t.STATUS = 'FAILED' THEN 'PENDING' ELSE t.STATUS END,
@@ -1127,14 +1096,24 @@ AS
);

-- Execute task in parallel
ENV_MANAGER.LOG_PROCESS_EVENT('Executing parallel task: ' || vTaskName, 'DEBUG', vParameters);
ENV_MANAGER.LOG_PROCESS_EVENT('Executing parallel task: ' || vTaskName || CASE WHEN pJobClass IS NOT NULL THEN ' with job class: ' || pJobClass ELSE '' END, 'DEBUG', vParameters);

DBMS_PARALLEL_EXECUTE.RUN_TASK(
task_name => vTaskName,
sql_stmt => 'BEGIN CT_MRDS.DATA_EXPORTER.EXPORT_PARTITION_PARALLEL(:start_id, :end_id); END;',
language_flag => DBMS_SQL.NATIVE,
parallel_level => pParallelDegree
);
IF pJobClass IS NOT NULL THEN
DBMS_PARALLEL_EXECUTE.RUN_TASK(
task_name => vTaskName,
sql_stmt => 'BEGIN CT_MRDS.DATA_EXPORTER.EXPORT_PARTITION_PARALLEL(:start_id, :end_id); END;',
language_flag => DBMS_SQL.NATIVE,
parallel_level => pParallelDegree,
job_class => pJobClass
);
ELSE
DBMS_PARALLEL_EXECUTE.RUN_TASK(
task_name => vTaskName,
sql_stmt => 'BEGIN CT_MRDS.DATA_EXPORTER.EXPORT_PARTITION_PARALLEL(:start_id, :end_id); END;',
language_flag => DBMS_SQL.NATIVE,
parallel_level => pParallelDegree
);
END IF;
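-- Note (not part of this commit): the IF/ELSE above exists because RUN_TASK's job_class
-- argument has its own default ('DEFAULT_JOB_CLASS') and is only passed when a class was
-- supplied. A hedged, equivalent single-call sketch would be:
-- DBMS_PARALLEL_EXECUTE.RUN_TASK(
--     task_name      => vTaskName,
--     sql_stmt       => 'BEGIN CT_MRDS.DATA_EXPORTER.EXPORT_PARTITION_PARALLEL(:start_id, :end_id); END;',
--     language_flag  => DBMS_SQL.NATIVE,
--     parallel_level => pParallelDegree,
--     job_class      => NVL(pJobClass, 'DEFAULT_JOB_CLASS')  -- assumed default class name
-- );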

-- Check for errors
DECLARE
@@ -1238,6 +1217,8 @@ AS
pTemplateTableName IN VARCHAR2 default NULL,
pMaxFileSize IN NUMBER default 104857600,
pRegisterExport IN BOOLEAN default FALSE,
pProcessName IN VARCHAR2 default 'DATA_EXPORTER',
pJobClass IN VARCHAR2 default NULL,
pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
)
IS
@@ -1275,6 +1256,7 @@ AS
,'pTemplateTableName => '''||nvl(pTemplateTableName, 'NULL')||''''
,'pMaxFileSize => '''||nvl(TO_CHAR(pMaxFileSize), 'NULL')||''''
,'pRegisterExport => '''||CASE WHEN pRegisterExport THEN 'TRUE' ELSE 'FALSE' END||''''
,'pJobClass => '''||nvl(pJobClass, 'NULL')||''''
,'pCredentialName => '''||nvl(pCredentialName, 'NULL')||''''
));
ENV_MANAGER.LOG_PROCESS_EVENT('Start','INFO', vParameters);
@@ -1383,10 +1365,10 @@ AS
WHEN NOT MATCHED THEN
INSERT (CHUNK_ID, TASK_NAME, YEAR_VALUE, MONTH_VALUE, SCHEMA_NAME, TABLE_NAME, KEY_COLUMN_NAME,
BUCKET_URI, FOLDER_NAME, PROCESSED_COLUMNS, MIN_DATE, MAX_DATE,
CREDENTIAL_NAME, FORMAT_TYPE, FILE_BASE_NAME, TEMPLATE_TABLE_NAME, MAX_FILE_SIZE, STATUS)
CREDENTIAL_NAME, FORMAT_TYPE, FILE_BASE_NAME, TEMPLATE_TABLE_NAME, MAX_FILE_SIZE, JOB_CLASS, STATUS)
VALUES (i, vTaskName, vPartitions(i).year, vPartitions(i).month, vSchemaName, vTableName, vKeyColumnName,
vBucketUri, pFolderName, vProcessedColumnList, pMinDate, pMaxDate,
pCredentialName, 'CSV', vFileBaseName, pTemplateTableName, pMaxFileSize, 'PENDING')
pCredentialName, 'CSV', vFileBaseName, pTemplateTableName, pMaxFileSize, pJobClass, 'PENDING')
WHEN MATCHED THEN
UPDATE SET TASK_NAME = vTaskName,
STATUS = CASE WHEN t.STATUS = 'FAILED' THEN 'PENDING' ELSE t.STATUS END,
@@ -1418,14 +1400,24 @@ AS
);

-- Execute task in parallel
ENV_MANAGER.LOG_PROCESS_EVENT('Executing parallel CSV export task: ' || vTaskName, 'DEBUG', vParameters);
ENV_MANAGER.LOG_PROCESS_EVENT('Executing parallel CSV export task: ' || vTaskName || CASE WHEN pJobClass IS NOT NULL THEN ' with job class: ' || pJobClass ELSE '' END, 'DEBUG', vParameters);

DBMS_PARALLEL_EXECUTE.RUN_TASK(
task_name => vTaskName,
sql_stmt => 'BEGIN CT_MRDS.DATA_EXPORTER.EXPORT_PARTITION_PARALLEL(:start_id, :end_id); END;',
language_flag => DBMS_SQL.NATIVE,
parallel_level => pParallelDegree
);
IF pJobClass IS NOT NULL THEN
DBMS_PARALLEL_EXECUTE.RUN_TASK(
task_name => vTaskName,
sql_stmt => 'BEGIN CT_MRDS.DATA_EXPORTER.EXPORT_PARTITION_PARALLEL(:start_id, :end_id); END;',
language_flag => DBMS_SQL.NATIVE,
parallel_level => pParallelDegree,
job_class => pJobClass
);
ELSE
DBMS_PARALLEL_EXECUTE.RUN_TASK(
task_name => vTaskName,
sql_stmt => 'BEGIN CT_MRDS.DATA_EXPORTER.EXPORT_PARTITION_PARALLEL(:start_id, :end_id); END;',
language_flag => DBMS_SQL.NATIVE,
parallel_level => pParallelDegree
);
END IF;

-- Check for errors
DECLARE
@@ -1584,7 +1576,8 @@ AS
PROCESSING_STATUS,
PARTITION_YEAR,
PARTITION_MONTH,
ARCH_FILE_NAME
ARCH_PATH,
PROCESS_NAME
) VALUES (
vSourceFileReceivedKey,
vConfigKey, -- Config key from A_SOURCE_FILE_CONFIG lookup
@@ -1596,7 +1589,8 @@ AS
'INGESTED',
NULL, -- PARTITION_YEAR not used for CSV exports
NULL, -- PARTITION_MONTH not used for CSV exports
NULL -- ARCH_FILE_NAME not used for CSV exports
NULL, -- ARCH_PATH not used for CSV exports
pProcessName -- Process name from parameter
);

ENV_MANAGER.LOG_PROCESS_EVENT('Registered file: FileReceivedKey=' || vSourceFileReceivedKey || ', File=' || vActualFileName || ', Size=' || vBytes || ' bytes', 'DEBUG', vParameters);
@@ -1618,7 +1612,8 @@ AS
PROCESSING_STATUS,
PARTITION_YEAR,
PARTITION_MONTH,
ARCH_FILE_NAME
ARCH_PATH,
PROCESS_NAME
) VALUES (
vSourceFileReceivedKey,
vConfigKey, -- Config key from A_SOURCE_FILE_CONFIG lookup
@@ -1627,7 +1622,8 @@ AS
'INGESTED',
NULL, -- PARTITION_YEAR not used for CSV exports
NULL, -- PARTITION_MONTH not used for CSV exports
NULL -- ARCH_FILE_NAME not used for CSV exports
NULL, -- ARCH_PATH not used for CSV exports
pProcessName -- Process name from parameter
);
END;
END LOOP;

@@ -9,21 +9,17 @@ AS
**/

-- Package Version Information
PACKAGE_VERSION CONSTANT VARCHAR2(10) := '2.7.5';
PACKAGE_BUILD_DATE CONSTANT VARCHAR2(20) := '2026-02-11 12:15:00';
PACKAGE_VERSION CONSTANT VARCHAR2(10) := '2.11.0';
PACKAGE_BUILD_DATE CONSTANT VARCHAR2(20) := '2026-02-18 10:00:00';
PACKAGE_AUTHOR CONSTANT VARCHAR2(100) := 'Grzegorz Michalski';

-- Version History (last 3-5 changes)
VERSION_HISTORY CONSTANT VARCHAR2(4000) :=
'v2.7.5 (2026-02-11): Added pRegisterExport parameter to EXPORT_TABLE_DATA procedure. When TRUE, registers each exported CSV file in A_SOURCE_FILE_RECEIVED.' || CHR(10) ||
'v2.7.4 (2026-02-11): ACTUAL FILENAME STORAGE - Store real filename with Oracle suffix in SOURCE_FILE_NAME instead of theoretical filename.' || CHR(10) ||
'v2.7.3 (2026-02-11): FIX LIKE pattern for DBMS_CLOUD.LIST_OBJECTS - Removed .csv extension from filename before pattern matching.' || CHR(10) ||
'v2.7.2 (2026-02-11): FIX pRegisterExport in EXPORT_TABLE_DATA_TO_CSV_BY_DATE - Added missing pRegisterExport parameter to EXPORT_SINGLE_PARTITION call.' || CHR(10) ||
'v2.7.1 (2026-02-11): AUTO-LOOKUP A_SOURCE_FILE_CONFIG_KEY - Parse pFolderName to automatically find config key from A_SOURCE_FILE_CONFIG.' || CHR(10) ||
'v2.7.0 (2026-02-10): Added pRegisterExport parameter to EXPORT_TABLE_DATA_TO_CSV_BY_DATE. When TRUE, registers each exported CSV file in A_SOURCE_FILE_RECEIVED.' || CHR(10) ||
'v2.6.3 (2026-01-28): COMPILATION FIX - Resolved ORA-00904 error in EXPORT_PARTITION_PARALLEL. SQLERRM properly assigned to vgMsgTmp variable.' || CHR(10) ||
'v2.6.2 (2026-01-28): CRITICAL FIX - Race condition when multiple exports run simultaneously. Session-safe cleanup with TASK_NAME filtering.' || CHR(10) ||
'v2.6.0 (2026-01-28): CRITICAL FIX - Added STATUS tracking to A_PARALLEL_EXPORT_CHUNKS table to prevent data duplication on retry.' || CHR(10);
'v2.11.0 (2026-02-18): Added pJobClass parameter to EXPORT_TABLE_DATA_BY_DATE and EXPORT_TABLE_DATA_TO_CSV_BY_DATE for Oracle Scheduler job class support (resource/priority management).' || CHR(10) ||
'v2.10.1 (2026-02-17): CRITICAL FIX - Remove redundant COMPLETED chunks deletion before parallel export that caused ORA-01403 errors (phantom chunks created by CREATE_CHUNKS_BY_NUMBER_COL).' || CHR(10) ||
'v2.10.0 (2026-02-13): CRITICAL FIX - Register ALL files created by DBMS_CLOUD.EXPORT_DATA (multi-file support due to Oracle parallel processing on large instances). Prevents orphaned files in rollback.' || CHR(10) ||
'v2.9.0 (2026-02-13): Added pProcessName parameter to EXPORT_TABLE_DATA and EXPORT_TABLE_DATA_TO_CSV_BY_DATE procedures for process tracking in A_SOURCE_FILE_RECEIVED table.' || CHR(10) ||
'v2.8.1 (2026-02-12): FIX query in EXPORT_TABLE_DATA - removed A_LOAD_HISTORY join to ensure single file output (simple SELECT).' || CHR(10);

cgBL CONSTANT VARCHAR2(2) := CHR(13)||CHR(10);
vgMsgTmp VARCHAR2(32000);
@@ -71,16 +67,19 @@ AS
/**
* @name EXPORT_TABLE_DATA
* @desc Wrapper procedure for DBMS_CLOUD.EXPORT_DATA.
* Exports data into CSV file on OCI infrustructure.
* Exports data into single CSV file on OCI infrastructure.
* pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
* Supports template table for column order and per-column date formatting.
* When pRegisterExport=TRUE, successfully exported files are registered in:
* When pRegisterExport=TRUE, successfully exported file is registered in:
* - CT_MRDS.A_SOURCE_FILE_RECEIVED (tracks file location, size, checksum, and metadata)
* @param pFileName - Optional filename (e.g., 'export.csv'). NULL = auto-generate from table name
* @param pTemplateTableName - Optional template table (SCHEMA.TABLE or TABLE) for:
* - Column order control (template defines CSV structure)
* - Per-column date formatting via FILE_MANAGER.GET_DATE_FORMAT
* - NULL = use source table columns in natural order
* @param pRegisterExport - When TRUE, registers each exported CSV file in A_SOURCE_FILE_RECEIVED table
* @param pMaxFileSize - Maximum file size in bytes (default 104857600 = 100MB, min 10MB, max 1GB)
* @param pRegisterExport - When TRUE, registers exported CSV file in A_SOURCE_FILE_RECEIVED table
* @param pProcessName - Process name stored in PROCESS_NAME column (default 'DATA_EXPORTER')
* @example
* begin
* DATA_EXPORTER.EXPORT_TABLE_DATA(
@@ -89,7 +88,9 @@ AS
* pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
* pBucketArea => 'DATA',
* pFolderName => 'csv_exports',
* pFileName => 'my_export.csv', -- Optional
* pTemplateTableName => 'CT_ET_TEMPLATES.MY_TEMPLATE', -- Optional
* pMaxFileSize => 104857600, -- Optional, default 100MB
* pRegisterExport => TRUE -- Optional, default FALSE
* );
* end;
@@ -100,8 +101,11 @@ AS
pKeyColumnName IN VARCHAR2,
pBucketArea IN VARCHAR2,
pFolderName IN VARCHAR2,
pFileName IN VARCHAR2 default NULL,
pTemplateTableName IN VARCHAR2 default NULL,
pMaxFileSize IN NUMBER default 104857600,
pRegisterExport IN BOOLEAN default FALSE,
pProcessName IN VARCHAR2 default 'DATA_EXPORTER',
pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
);

@@ -143,6 +147,7 @@ AS
pMaxDate IN DATE default SYSDATE,
pParallelDegree IN NUMBER default 1,
pTemplateTableName IN VARCHAR2 default NULL,
pJobClass IN VARCHAR2 default NULL,
pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
);

@@ -158,6 +163,7 @@ AS
* File naming pattern: {pFileName}_YYYYMM.csv or {TABLENAME}_YYYYMM.csv (if pFileName is NULL)
* When pRegisterExport=TRUE, successfully exported files are registered in:
* - CT_MRDS.A_SOURCE_FILE_RECEIVED (tracks file location, size, checksum, and metadata)
* @param pProcessName - Process name stored in PROCESS_NAME column (default 'DATA_EXPORTER')
* @example
* begin
* -- With custom filename
@@ -203,6 +209,8 @@ AS
pTemplateTableName IN VARCHAR2 default NULL,
pMaxFileSize IN NUMBER default 104857600,
pRegisterExport IN BOOLEAN default FALSE,
pProcessName IN VARCHAR2 default 'DATA_EXPORTER',
pJobClass IN VARCHAR2 default NULL,
pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
);

@@ -29,6 +29,7 @@ PROMPT MARS-835-PREHOOK: Rollback to Previous Versions
PROMPT =========================================================================
PROMPT WARNING: This will reverse all changes from MARS-835-PREHOOK installation!
PROMPT - Removes A_PARALLEL_EXPORT_CHUNKS table
PROMPT - Reverts A_SOURCE_FILE_RECEIVED table (rename ARCH_PATH to ARCH_FILE_NAME, drop PROCESS_NAME column)
PROMPT - Restores ENV_MANAGER v3.1.0 (removes parallel error codes)
PROMPT - Restores DATA_EXPORTER v2.1.0 (removes parallel + Smart Column Mapping)
PROMPT =========================================================================
@@ -65,13 +66,19 @@ PROMPT =========================================================================

PROMPT
PROMPT =========================================================================
PROMPT Step 3: Track Rollback Version
PROMPT Step 3: Rollback A_SOURCE_FILE_RECEIVED Table Structure
PROMPT =========================================================================
@@93_MARS_835_PREHOOK_rollback_SOURCE_FILE_RECEIVED_table.sql

PROMPT
PROMPT =========================================================================
PROMPT Step 4: Track Rollback Version
PROMPT =========================================================================
@@track_package_versions.sql

PROMPT
PROMPT =========================================================================
PROMPT Step 4: Verify Package Versions After Rollback
PROMPT Step 5: Verify Package Versions After Rollback
PROMPT =========================================================================
@@verify_packages_version.sql

@@ -0,0 +1,29 @@
|
||||
-- ====================================================================
|
||||
-- A_SOURCE_FILE_RECEIVED Table
|
||||
-- ====================================================================
|
||||
-- Purpose: Track received files and their processing status
|
||||
-- ====================================================================
|
||||
|
||||
CREATE TABLE CT_MRDS.A_SOURCE_FILE_RECEIVED (
|
||||
A_SOURCE_FILE_RECEIVED_KEY NUMBER(38,0) NOT NULL ENABLE,
|
||||
A_SOURCE_FILE_CONFIG_KEY NUMBER(38,0) NOT NULL ENABLE,
|
||||
SOURCE_FILE_NAME VARCHAR2(1000) NOT NULL,
|
||||
CHECKSUM VARCHAR2(128),
|
||||
CREATED TIMESTAMP(6) WITH TIME ZONE,
|
||||
BYTES NUMBER,
|
||||
RECEPTION_DATE DATE NOT NULL,
|
||||
PROCESSING_STATUS VARCHAR2(200),
|
||||
EXTERNAL_TABLE_NAME VARCHAR2(200),
|
||||
PARTITION_YEAR VARCHAR2(4),
|
||||
PARTITION_MONTH VARCHAR2(2),
|
||||
ARCH_FILE_NAME VARCHAR2(1000),
|
||||
CONSTRAINT A_SOURCE_FILE_RECEIVED_PK PRIMARY KEY (A_SOURCE_FILE_RECEIVED_KEY),
|
||||
CONSTRAINT ASFR_A_SOURCE_FILE_CONFIG_KEY_FK FOREIGN KEY(A_SOURCE_FILE_CONFIG_KEY) REFERENCES CT_MRDS.A_SOURCE_FILE_CONFIG(A_SOURCE_FILE_CONFIG_KEY),
|
||||
CONSTRAINT A_SOURCE_FILE_RECEIVED_CHK CHECK (PROCESSING_STATUS IN ('RECEIVED', 'VALIDATED', 'READY_FOR_INGESTION', 'INGESTED', 'ARCHIVED'))
|
||||
) TABLESPACE "DATA";
|
||||
|
||||
-- Unique index for file identification (workaround for TIMESTAMP WITH TIMEZONE constraint limitation)
|
||||
CREATE UNIQUE INDEX CT_MRDS.A_SOURCE_FILE_RECEIVED_UK1
|
||||
ON CT_MRDS.A_SOURCE_FILE_RECEIVED(CHECKSUM, CREATED, BYTES);
|
||||
|
||||
GRANT SELECT, INSERT, UPDATE, DELETE ON CT_MRDS.A_SOURCE_FILE_RECEIVED TO MRDS_LOADER_ROLE;
|
||||
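-- Added illustration (assumption, not part of the original migration script): a minimal
-- sketch of how rows registered in this table can be inspected. It uses only the columns
-- defined above and the PROCESSING_STATUS values allowed by the CHECK constraint.
--
--   SELECT SOURCE_FILE_NAME, PROCESSING_STATUS, BYTES, RECEPTION_DATE
--     FROM CT_MRDS.A_SOURCE_FILE_RECEIVED
--    WHERE PROCESSING_STATUS = 'RECEIVED'   -- RECEIVED, VALIDATED, READY_FOR_INGESTION, INGESTED or ARCHIVED
--    ORDER BY RECEPTION_DATE DESC;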
File diff suppressed because it is too large
@@ -0,0 +1,233 @@
|
||||
create or replace PACKAGE CT_MRDS.DATA_EXPORTER
|
||||
AUTHID CURRENT_USER
|
||||
AS
|
||||
/**
|
||||
* Data Export Package: Provides comprehensive data export capabilities to various formats (CSV, Parquet)
|
||||
* with support for cloud storage integration via Oracle Cloud Infrastructure (OCI).
|
||||
* The structure of this comment is used by the GET_PACKAGE_DOCUMENTATION function,
|
||||
* which returns documentation text ready to copy-paste into the Confluence page.
|
||||
**/
|
||||
|
||||
-- Package Version Information
|
||||
PACKAGE_VERSION CONSTANT VARCHAR2(10) := '2.7.5';
|
||||
PACKAGE_BUILD_DATE CONSTANT VARCHAR2(20) := '2026-02-11 12:15:00';
|
||||
PACKAGE_AUTHOR CONSTANT VARCHAR2(100) := 'Grzegorz Michalski';
|
||||
|
||||
-- Version History (last 3-5 changes)
|
||||
VERSION_HISTORY CONSTANT VARCHAR2(4000) :=
|
||||
'v2.7.5 (2026-02-11): Added pRegisterExport parameter to EXPORT_TABLE_DATA procedure. When TRUE, registers each exported CSV file in A_SOURCE_FILE_RECEIVED.' || CHR(10) ||
|
||||
'v2.7.4 (2026-02-11): ACTUAL FILENAME STORAGE - Store real filename with Oracle suffix in SOURCE_FILE_NAME instead of theoretical filename.' || CHR(10) ||
|
||||
'v2.7.3 (2026-02-11): FIX LIKE pattern for DBMS_CLOUD.LIST_OBJECTS - Removed .csv extension from filename before pattern matching.' || CHR(10) ||
|
||||
'v2.7.2 (2026-02-11): FIX pRegisterExport in EXPORT_TABLE_DATA_TO_CSV_BY_DATE - Added missing pRegisterExport parameter to EXPORT_SINGLE_PARTITION call.' || CHR(10) ||
|
||||
'v2.7.1 (2026-02-11): AUTO-LOOKUP A_SOURCE_FILE_CONFIG_KEY - Parse pFolderName to automatically find config key from A_SOURCE_FILE_CONFIG.' || CHR(10) ||
|
||||
'v2.7.0 (2026-02-10): Added pRegisterExport parameter to EXPORT_TABLE_DATA_TO_CSV_BY_DATE. When TRUE, registers each exported CSV file in A_SOURCE_FILE_RECEIVED.' || CHR(10) ||
|
||||
'v2.6.3 (2026-01-28): COMPILATION FIX - Resolved ORA-00904 error in EXPORT_PARTITION_PARALLEL. SQLERRM properly assigned to vgMsgTmp variable.' || CHR(10) ||
|
||||
'v2.6.2 (2026-01-28): CRITICAL FIX - Race condition when multiple exports run simultaneously. Session-safe cleanup with TASK_NAME filtering.' || CHR(10) ||
|
||||
'v2.6.0 (2026-01-28): CRITICAL FIX - Added STATUS tracking to A_PARALLEL_EXPORT_CHUNKS table to prevent data duplication on retry.' || CHR(10);
|
||||
|
||||
cgBL CONSTANT VARCHAR2(2) := CHR(13)||CHR(10);
|
||||
vgMsgTmp VARCHAR2(32000);
|
||||
|
||||
---------------------------------------------------------------------------------------------------------------------------
|
||||
-- TYPE DEFINITIONS FOR PARTITION HANDLING
|
||||
---------------------------------------------------------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Record type for year/month partition information
|
||||
**/
|
||||
TYPE partition_rec IS RECORD (
|
||||
year VARCHAR2(4),
|
||||
month VARCHAR2(2)
|
||||
);
|
||||
|
||||
/**
|
||||
* Table type for collection of partition records
|
||||
**/
|
||||
TYPE partition_tab IS TABLE OF partition_rec;
|
||||
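-- Added illustration (assumption, not in the original package source): a minimal sketch of
-- how a caller could use these public partition types; the literal values are hypothetical.
--
--   declare
--     vPartitions DATA_EXPORTER.partition_tab := DATA_EXPORTER.partition_tab();
--   begin
--     vPartitions.extend;
--     vPartitions(1).year  := '2025';
--     vPartitions(1).month := '09';
--     for i in 1 .. vPartitions.count loop
--       dbms_output.put_line(vPartitions(i).year || '-' || vPartitions(i).month);
--     end loop;
--   end;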
|
||||
---------------------------------------------------------------------------------------------------------------------------
|
||||
-- INTERNAL PARALLEL PROCESSING CALLBACK
|
||||
---------------------------------------------------------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* @name EXPORT_PARTITION_PARALLEL
|
||||
* @desc Internal callback procedure for DBMS_PARALLEL_EXECUTE.
|
||||
* Processes single partition (year/month) chunk in parallel task.
|
||||
* Called by DBMS_PARALLEL_EXECUTE framework for each chunk.
|
||||
* This procedure is PUBLIC because DBMS_PARALLEL_EXECUTE requires it,
|
||||
* but should NOT be called directly by external code.
|
||||
* @param pStartId - Chunk start ID (CHUNK_ID from A_PARALLEL_EXPORT_CHUNKS table)
|
||||
* @param pEndId - Chunk end ID (same as pStartId for single-row chunks)
|
||||
**/
|
||||
PROCEDURE EXPORT_PARTITION_PARALLEL (
|
||||
pStartId IN NUMBER,
|
||||
pEndId IN NUMBER
|
||||
);
|
||||
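-- Added illustration (assumption, not in the original source): roughly how the package body
-- would be expected to hand this callback to DBMS_PARALLEL_EXECUTE. The task name and the
-- unqualified chunk-table reference below are hypothetical; the real wiring lives in the body.
--
--   DBMS_PARALLEL_EXECUTE.CREATE_TASK(task_name => 'EXPORT_TASK');
--   DBMS_PARALLEL_EXECUTE.CREATE_CHUNKS_BY_SQL(
--     task_name => 'EXPORT_TASK',
--     sql_stmt  => 'SELECT CHUNK_ID, CHUNK_ID FROM A_PARALLEL_EXPORT_CHUNKS',
--     by_rowid  => FALSE);
--   DBMS_PARALLEL_EXECUTE.RUN_TASK(
--     task_name      => 'EXPORT_TASK',
--     sql_stmt       => 'BEGIN CT_MRDS.DATA_EXPORTER.EXPORT_PARTITION_PARALLEL(:start_id, :end_id); END;',
--     language_flag  => DBMS_SQL.NATIVE,
--     parallel_level => 8);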
|
||||
---------------------------------------------------------------------------------------------------------------------------
|
||||
-- MAIN EXPORT PROCEDURES
|
||||
---------------------------------------------------------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* @name EXPORT_TABLE_DATA
|
||||
* @desc Wrapper procedure for DBMS_CLOUD.EXPORT_DATA.
|
||||
* Exports data into a CSV file on OCI infrastructure.
|
||||
* pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
|
||||
* Supports template table for column order and per-column date formatting.
|
||||
* When pRegisterExport=TRUE, successfully exported files are registered in:
|
||||
* - CT_MRDS.A_SOURCE_FILE_RECEIVED (tracks file location, size, checksum, and metadata)
|
||||
* @param pTemplateTableName - Optional template table (SCHEMA.TABLE or TABLE) for:
|
||||
* - Column order control (template defines CSV structure)
|
||||
* - Per-column date formatting via FILE_MANAGER.GET_DATE_FORMAT
|
||||
* - NULL = use source table columns in natural order
|
||||
* @param pRegisterExport - When TRUE, registers each exported CSV file in A_SOURCE_FILE_RECEIVED table
|
||||
* @example
|
||||
* begin
|
||||
* DATA_EXPORTER.EXPORT_TABLE_DATA(
|
||||
* pSchemaName => 'CT_MRDS',
|
||||
* pTableName => 'MY_TABLE',
|
||||
* pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
||||
* pBucketArea => 'DATA',
|
||||
* pFolderName => 'csv_exports',
|
||||
* pTemplateTableName => 'CT_ET_TEMPLATES.MY_TEMPLATE', -- Optional
|
||||
* pRegisterExport => TRUE -- Optional, default FALSE
|
||||
* );
|
||||
* end;
|
||||
**/
|
||||
PROCEDURE EXPORT_TABLE_DATA (
|
||||
pSchemaName IN VARCHAR2,
|
||||
pTableName IN VARCHAR2,
|
||||
pKeyColumnName IN VARCHAR2,
|
||||
pBucketArea IN VARCHAR2,
|
||||
pFolderName IN VARCHAR2,
|
||||
pTemplateTableName IN VARCHAR2 default NULL,
|
||||
pRegisterExport IN BOOLEAN default FALSE,
|
||||
pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
|
||||
);
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* @name EXPORT_TABLE_DATA_BY_DATE
|
||||
* @desc Wrapper procedure for DBMS_CLOUD.EXPORT_DATA.
|
||||
* Exports data into PARQUET files on OCI infrastructure.
|
||||
* Each YEAR_MONTH pair goes to a separate file (implicit partitioning).
|
||||
* Allows specifying custom column list or uses T.* if pColumnList is NULL.
|
||||
* Validates that all columns in pColumnList exist in the target table.
|
||||
* Automatically adds 'T.' prefix to column names in pColumnList.
|
||||
* Supports parallel partition processing via pParallelDegree parameter (default 1, range 1-16).
|
||||
* pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
|
||||
* @example
|
||||
* begin
|
||||
* DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
|
||||
* pSchemaName => 'CT_MRDS',
|
||||
* pTableName => 'MY_TABLE',
|
||||
* pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
||||
* pBucketArea => 'DATA',
|
||||
* pFolderName => 'parquet_exports',
|
||||
* pColumnList => 'COLUMN1, COLUMN2, COLUMN3', -- Optional
|
||||
* pMinDate => DATE '2024-01-01',
|
||||
* pMaxDate => SYSDATE,
|
||||
* pParallelDegree => 8 -- Optional, default 1, range 1-16
|
||||
* );
|
||||
* end;
|
||||
**/
|
||||
PROCEDURE EXPORT_TABLE_DATA_BY_DATE (
|
||||
pSchemaName IN VARCHAR2,
|
||||
pTableName IN VARCHAR2,
|
||||
pKeyColumnName IN VARCHAR2,
|
||||
pBucketArea IN VARCHAR2,
|
||||
pFolderName IN VARCHAR2,
|
||||
pColumnList IN VARCHAR2 default NULL,
|
||||
pMinDate IN DATE default DATE '1900-01-01',
|
||||
pMaxDate IN DATE default SYSDATE,
|
||||
pParallelDegree IN NUMBER default 1,
|
||||
pTemplateTableName IN VARCHAR2 default NULL,
|
||||
pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
|
||||
);
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* @name EXPORT_TABLE_DATA_TO_CSV_BY_DATE
|
||||
* @desc Exports data to separate CSV files partitioned by year and month.
|
||||
* Creates one CSV file for each year/month combination found in the data.
|
||||
* Uses the same date filtering mechanism with CT_ODS.A_LOAD_HISTORY as EXPORT_TABLE_DATA_BY_DATE,
|
||||
* but exports to CSV format instead of Parquet.
|
||||
* Supports parallel partition processing via pParallelDegree parameter (1-16).
|
||||
* File naming pattern: {pFileName}_YYYYMM.csv or {TABLENAME}_YYYYMM.csv (if pFileName is NULL)
|
||||
* When pRegisterExport=TRUE, successfully exported files are registered in:
|
||||
* - CT_MRDS.A_SOURCE_FILE_RECEIVED (tracks file location, size, checksum, and metadata)
|
||||
* @example
|
||||
* begin
|
||||
* -- With custom filename
|
||||
* DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
|
||||
* pSchemaName => 'CT_MRDS',
|
||||
* pTableName => 'MY_TABLE',
|
||||
* pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
||||
* pBucketArea => 'DATA',
|
||||
* pFolderName => 'exports',
|
||||
* pFileName => 'my_export.csv',
|
||||
* pMinDate => DATE '2024-01-01',
|
||||
* pMaxDate => SYSDATE,
|
||||
* pParallelDegree => 8, -- Optional, default 1, range 1-16
|
||||
* pRegisterExport => TRUE -- Optional, default FALSE, registers to A_SOURCE_FILE_RECEIVED
|
||||
* );
|
||||
*
|
||||
* -- With auto-generated filename (based on table name only)
|
||||
* DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
|
||||
* pSchemaName => 'OU_TOP',
|
||||
* pTableName => 'AGGREGATED_ALLOTMENT',
|
||||
* pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
||||
* pBucketArea => 'ARCHIVE',
|
||||
* pFolderName => 'exports',
|
||||
* pMinDate => DATE '2025-09-01',
|
||||
* pMaxDate => DATE '2025-09-17',
|
||||
* pRegisterExport => TRUE -- Registers each export to A_SOURCE_FILE_RECEIVED table
|
||||
* );
|
||||
* -- This will create files like: AGGREGATED_ALLOTMENT_202509.csv, etc.
|
||||
* pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
|
||||
* end;
|
||||
**/
|
||||
PROCEDURE EXPORT_TABLE_DATA_TO_CSV_BY_DATE (
|
||||
pSchemaName IN VARCHAR2,
|
||||
pTableName IN VARCHAR2,
|
||||
pKeyColumnName IN VARCHAR2,
|
||||
pBucketArea IN VARCHAR2,
|
||||
pFolderName IN VARCHAR2,
|
||||
pFileName IN VARCHAR2 DEFAULT NULL,
|
||||
pColumnList IN VARCHAR2 default NULL,
|
||||
pMinDate IN DATE default DATE '1900-01-01',
|
||||
pMaxDate IN DATE default SYSDATE,
|
||||
pParallelDegree IN NUMBER default 1,
|
||||
pTemplateTableName IN VARCHAR2 default NULL,
|
||||
pMaxFileSize IN NUMBER default 104857600,
|
||||
pRegisterExport IN BOOLEAN default FALSE,
|
||||
pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
|
||||
);
|
||||
|
||||
---------------------------------------------------------------------------------------------------------------------------
|
||||
-- VERSION MANAGEMENT FUNCTIONS
|
||||
---------------------------------------------------------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Returns the current package version number
|
||||
* return: Version string in format X.Y.Z (e.g., '2.1.0')
|
||||
**/
|
||||
FUNCTION GET_VERSION RETURN VARCHAR2;
|
||||
|
||||
/**
|
||||
* Returns comprehensive build information including version, date, and author
|
||||
* return: Formatted string with complete build details
|
||||
**/
|
||||
FUNCTION GET_BUILD_INFO RETURN VARCHAR2;
|
||||
|
||||
/**
|
||||
* Returns the version history with recent changes
|
||||
* return: Multi-line string with version history
|
||||
**/
|
||||
FUNCTION GET_VERSION_HISTORY RETURN VARCHAR2;
|
||||
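-- Added illustration (assumption, not part of the original source): how the version
-- functions declared above can be queried once the package is installed.
--
--   SELECT DATA_EXPORTER.GET_VERSION()         FROM DUAL;   -- e.g. '2.7.5'
--   SELECT DATA_EXPORTER.GET_BUILD_INFO()      FROM DUAL;
--   SELECT DATA_EXPORTER.GET_VERSION_HISTORY() FROM DUAL;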
|
||||
END;
|
||||
|
||||
/
|
||||
File diff suppressed because it is too large
@@ -0,0 +1,625 @@
|
||||
create or replace PACKAGE CT_MRDS.ENV_MANAGER
|
||||
AUTHID CURRENT_USER
|
||||
AS
|
||||
/**
|
||||
* General comment for the package: please document functions and procedures as shown in the example below.
|
||||
* It is a standard.
|
||||
* The structure of this comment is used by the GET_PACKAGE_DOCUMENTATION function,
|
||||
* which returns documentation text ready to copy-paste into the Confluence page.
|
||||
**/
|
||||
|
||||
-- Example comment:
|
||||
/**
|
||||
* @name EX_PROCEDURE_NAME
|
||||
* @desc Procedure description
|
||||
* @example select ENV_MANAGER.EX_PROCEDURE_NAME(pParameter => 129) from dual;
|
||||
* @ex_rslt Example Result
|
||||
**/
|
||||
|
||||
-- Package Version Information (Semantic Versioning: MAJOR.MINOR.PATCH)
|
||||
PACKAGE_VERSION CONSTANT VARCHAR2(10) := '3.2.0';
|
||||
PACKAGE_BUILD_DATE CONSTANT VARCHAR2(20) := '2025-12-20 10:00:00';
|
||||
PACKAGE_AUTHOR CONSTANT VARCHAR2(100) := 'Grzegorz Michalski';
|
||||
|
||||
-- Version History (Latest changes first)
|
||||
VERSION_HISTORY CONSTANT VARCHAR2(4000) :=
|
||||
'3.2.0 (2025-12-20): Added error codes for parallel execution support (CODE_INVALID_PARALLEL_DEGREE -20110, CODE_PARALLEL_EXECUTION_FAILED -20111)' || CHR(13)||CHR(10) ||
|
||||
'3.1.0 (2025-10-22): Added package hash tracking and automatic change detection system (SHA256 hashing)' || CHR(13)||CHR(10) ||
|
||||
'3.0.0 (2025-10-22): Added package versioning system with centralized version management functions' || CHR(13)||CHR(10) ||
|
||||
'2.1.0 (2025-10-15): Added ANALYZE_VALIDATION_ERRORS function for comprehensive CSV validation analysis' || CHR(13)||CHR(10) ||
|
||||
'2.0.0 (2025-10-01): Added LOG_PROCESS_ERROR procedure with enhanced error diagnostics and stack traces' || CHR(13)||CHR(10) ||
|
||||
'1.5.0 (2025-09-20): Added console logging support with gvConsoleLoggingEnabled configuration' || CHR(13)||CHR(10) ||
|
||||
'1.0.0 (2025-09-01): Initial release with error management and configuration system';
|
||||
|
||||
TYPE Error_Record IS RECORD (
|
||||
code PLS_INTEGER,
|
||||
message VARCHAR2(4000)
|
||||
);
|
||||
|
||||
TYPE tErrorList IS TABLE OF Error_Record INDEX BY PLS_INTEGER;
|
||||
|
||||
Errors tErrorList;
|
||||
|
||||
|
||||
guid VARCHAR2(32);
|
||||
gvEnv VARCHAR2(200);
|
||||
gvUsername VARCHAR2(128);
|
||||
gvOsuser VARCHAR2(128);
|
||||
gvMachine VARCHAR2(64);
|
||||
gvModule VARCHAR2(64);
|
||||
|
||||
gvNameSpace VARCHAR2(200);
|
||||
gvRegion VARCHAR2(200);
|
||||
gvDataBucketName VARCHAR2(200);
|
||||
gvInboxBucketName VARCHAR2(200);
|
||||
gvArchiveBucketName VARCHAR2(200);
|
||||
gvDataBucketUri VARCHAR2(200);
|
||||
gvInboxBucketUri VARCHAR2(200);
|
||||
gvArchiveBucketUri VARCHAR2(200);
|
||||
gvCredentialName VARCHAR2(200);
|
||||
|
||||
-- Overwritten by variable "LoggingEnabled" in A_FILE_MANAGER_CONFIG.CONFIG_VARIABLE table
|
||||
gvLoggingEnabled VARCHAR2(3) := 'ON'; -- 'ON' or 'OFF'
|
||||
|
||||
-- Overwritten by variable "MinLogLevel" in A_FILE_MANAGER_CONFIG.CONFIG_VARIABLE table
|
||||
-- Possible values: DEBUG ,INFO ,WARNING ,ERROR
|
||||
gvMinLogLevel VARCHAR2(10) := 'DEBUG';
|
||||
|
||||
-- Overwritten by variable "DefaultDateFormat" in A_FILE_MANAGER_CONFIG.CONFIG_VARIABLE table
|
||||
gvDefaultDateFormat VARCHAR2(200) := 'DD/MM/YYYY HH24:MI:SS';
|
||||
|
||||
-- Overwritten by variable "ConsoleLoggingEnabled" in A_FILE_MANAGER_CONFIG.CONFIG_VARIABLE table
|
||||
gvConsoleLoggingEnabled VARCHAR2(3) := 'ON'; -- 'ON' or 'OFF'
|
||||
|
||||
cgBL CONSTANT VARCHAR2(2) := CHR(13)||CHR(10);
|
||||
|
||||
vgSourceFileConfigKey PLS_INTEGER;
|
||||
|
||||
vgMsgTmp VARCHAR2(32000);
|
||||
--Exceptions
|
||||
ERR_EMPTY_FILEURI_AND_RECKEY EXCEPTION;
|
||||
CODE_EMPTY_FILEURI_AND_RECKEY CONSTANT PLS_INTEGER := -20001;
|
||||
MSG_EMPTY_FILEURI_AND_RECKEY VARCHAR2(4000) := 'Either pFileUri or pSourceFileReceivedKey must be not null';
|
||||
PRAGMA EXCEPTION_INIT( ERR_EMPTY_FILEURI_AND_RECKEY
|
||||
,CODE_EMPTY_FILEURI_AND_RECKEY);
|
||||
|
||||
|
||||
ERR_NO_CONFIG_MATCH_FOR_FILEURI EXCEPTION;
|
||||
CODE_NO_CONFIG_MATCH_FOR_FILEURI CONSTANT PLS_INTEGER := -20002;
|
||||
MSG_NO_CONFIG_MATCH_FOR_FILEURI VARCHAR2(4000) := 'No match for source file in A_SOURCE_FILE_CONFIG table'
|
||||
||cgBL||' The file provided in parameter: pFileUri does not have '
|
||||
||cgBL||' corresponding configuration in A_SOURCE_FILE_CONFIG table';
|
||||
PRAGMA EXCEPTION_INIT( ERR_NO_CONFIG_MATCH_FOR_FILEURI
|
||||
,CODE_NO_CONFIG_MATCH_FOR_FILEURI);
|
||||
|
||||
ERR_MULTIPLE_MATCH_FOR_SRCFILE EXCEPTION;
|
||||
CODE_MULTIPLE_MATCH_FOR_SRCFILE CONSTANT PLS_INTEGER := -20003;
|
||||
MSG_MULTIPLE_MATCH_FOR_SRCFILE VARCHAR2(4000) := 'Multiple matches for source file in A_SOURCE_FILE_CONFIG table';
|
||||
PRAGMA EXCEPTION_INIT( ERR_MULTIPLE_MATCH_FOR_SRCFILE
|
||||
,CODE_MULTIPLE_MATCH_FOR_SRCFILE);
|
||||
|
||||
ERR_MISSING_COLUMN_DATE_FORMAT EXCEPTION;
|
||||
CODE_MISSING_COLUMN_DATE_FORMAT CONSTANT PLS_INTEGER := -20004;
|
||||
MSG_MISSING_COLUMN_DATE_FORMAT VARCHAR2(4000) := 'Missing entry in config table: A_COLUMN_DATE_FORMAT primary key(TEMPLATE_TABLE_NAME, COLUMN_NAME)'
|
||||
||cgBL||' Remember: each column which data_type IN (''DATE'', ''TIMESTAMP'')'
|
||||
||cgBL||' should have DateFormat specified in A_COLUMN_DATE_FORMAT table '
|
||||
||cgBL||' for example: ''YYYY-MM-DD''';
|
||||
PRAGMA EXCEPTION_INIT( ERR_MISSING_COLUMN_DATE_FORMAT
|
||||
,CODE_MISSING_COLUMN_DATE_FORMAT);
|
||||
|
||||
ERR_MULTIPLE_COLUMN_DATE_FORMAT EXCEPTION;
|
||||
CODE_MULTIPLE_COLUMN_DATE_FORMAT CONSTANT PLS_INTEGER := -20005;
|
||||
MSG_MULTIPLE_COLUMN_DATE_FORMAT VARCHAR2(4000) := 'Multiple records for date format in A_COLUMN_DATE_FORMAT table'
|
||||
||cgBL||' There should be only one format specified for each DATE/TIMESTAMP column';
|
||||
PRAGMA EXCEPTION_INIT( ERR_MULTIPLE_COLUMN_DATE_FORMAT
|
||||
,CODE_MULTIPLE_COLUMN_DATE_FORMAT);
|
||||
|
||||
|
||||
ERR_DIDNT_GET_LOAD_OPERATION_ID EXCEPTION;
|
||||
CODE_DIDNT_GET_LOAD_OPERATION_ID CONSTANT PLS_INTEGER := -20006;
|
||||
MSG_DIDNT_GET_LOAD_OPERATION_ID VARCHAR2(4000) := 'Did not get load operation id from external table validation';
|
||||
PRAGMA EXCEPTION_INIT( ERR_DIDNT_GET_LOAD_OPERATION_ID
|
||||
,CODE_DIDNT_GET_LOAD_OPERATION_ID);
|
||||
|
||||
ERR_NO_CONFIG_FOR_RECEIVED_FILE EXCEPTION;
|
||||
CODE_NO_CONFIG_FOR_RECEIVED_FILE CONSTANT PLS_INTEGER := -20007;
|
||||
MSG_NO_CONFIG_FOR_RECEIVED_FILE VARCHAR2(4000) := 'No match for received source file in A_SOURCE_FILE_CONFIG '
|
||||
||cgBL||' or missing data in A_SOURCE_FILE_RECEIVED table for provided pSourceFileReceivedKey parameter';
|
||||
PRAGMA EXCEPTION_INIT( ERR_NO_CONFIG_FOR_RECEIVED_FILE
|
||||
,CODE_NO_CONFIG_FOR_RECEIVED_FILE);
|
||||
|
||||
ERR_MULTI_CONFIG_FOR_RECEIVED_FILE EXCEPTION;
|
||||
CODE_MULTI_CONFIG_FOR_RECEIVED_FILE CONSTANT PLS_INTEGER := -20008;
|
||||
MSG_MULTI_CONFIG_FOR_RECEIVED_FILE VARCHAR2(4000) := 'Multiple matches for received source file in A_SOURCE_FILE_CONFIG';
|
||||
PRAGMA EXCEPTION_INIT( ERR_MULTI_CONFIG_FOR_RECEIVED_FILE
|
||||
,CODE_MULTI_CONFIG_FOR_RECEIVED_FILE);
|
||||
|
||||
ERR_FILE_NOT_FOUND_ON_CLOUD EXCEPTION;
|
||||
CODE_FILE_NOT_FOUND_ON_CLOUD CONSTANT PLS_INTEGER := -20009;
|
||||
MSG_FILE_NOT_FOUND_ON_CLOUD VARCHAR2(4000) := 'File not found on the cloud';
|
||||
PRAGMA EXCEPTION_INIT( ERR_FILE_NOT_FOUND_ON_CLOUD
|
||||
,CODE_FILE_NOT_FOUND_ON_CLOUD);
|
||||
|
||||
ERR_FILE_VALIDATION_FAILED EXCEPTION;
|
||||
CODE_FILE_VALIDATION_FAILED CONSTANT PLS_INTEGER := -20010;
|
||||
MSG_FILE_VALIDATION_FAILED VARCHAR2(4000) := 'File validation failed';
|
||||
PRAGMA EXCEPTION_INIT( ERR_FILE_VALIDATION_FAILED
|
||||
,CODE_FILE_VALIDATION_FAILED);
|
||||
|
||||
ERR_EXCESS_COLUMNS_DETECTED EXCEPTION;
|
||||
CODE_EXCESS_COLUMNS_DETECTED CONSTANT PLS_INTEGER := -20011;
|
||||
MSG_EXCESS_COLUMNS_DETECTED VARCHAR2(4000) := 'CSV file contains more columns than template allows';
|
||||
PRAGMA EXCEPTION_INIT( ERR_EXCESS_COLUMNS_DETECTED
|
||||
,CODE_EXCESS_COLUMNS_DETECTED);
|
||||
|
||||
ERR_NO_CONFIG_MATCH EXCEPTION;
|
||||
CODE_NO_CONFIG_MATCH CONSTANT PLS_INTEGER := -20012;
|
||||
MSG_NO_CONFIG_MATCH VARCHAR2(4000) := 'No match for specified parameters in A_SOURCE_FILE_CONFIG table';
|
||||
PRAGMA EXCEPTION_INIT( ERR_NO_CONFIG_MATCH
|
||||
,CODE_NO_CONFIG_MATCH);
|
||||
|
||||
ERR_UNKNOWN_PREFIX EXCEPTION;
|
||||
CODE_UNKNOWN_PREFIX CONSTANT PLS_INTEGER := -20013;
|
||||
MSG_UNKNOWN_PREFIX VARCHAR2(4000) := 'Unknown prefix';
|
||||
PRAGMA EXCEPTION_INIT( ERR_UNKNOWN_PREFIX
|
||||
,CODE_UNKNOWN_PREFIX);
|
||||
|
||||
ERR_TABLE_NOT_EXISTS EXCEPTION;
|
||||
CODE_TABLE_NOT_EXISTS CONSTANT PLS_INTEGER := -20014;
|
||||
MSG_TABLE_NOT_EXISTS VARCHAR2(4000) := 'Table does not exist';
|
||||
PRAGMA EXCEPTION_INIT( ERR_TABLE_NOT_EXISTS
|
||||
,CODE_TABLE_NOT_EXISTS);
|
||||
|
||||
ERR_COLUMN_NOT_EXISTS EXCEPTION;
|
||||
CODE_COLUMN_NOT_EXISTS CONSTANT PLS_INTEGER := -20015;
|
||||
MSG_COLUMN_NOT_EXISTS VARCHAR2(4000) := 'Column does not exist in table';
|
||||
PRAGMA EXCEPTION_INIT( ERR_COLUMN_NOT_EXISTS
|
||||
,CODE_COLUMN_NOT_EXISTS);
|
||||
|
||||
ERR_UNSUPPORTED_DATA_TYPE EXCEPTION;
|
||||
CODE_UNSUPPORTED_DATA_TYPE CONSTANT PLS_INTEGER := -20016;
|
||||
MSG_UNSUPPORTED_DATA_TYPE VARCHAR2(4000) := 'Unsupported data type';
|
||||
PRAGMA EXCEPTION_INIT( ERR_UNSUPPORTED_DATA_TYPE
|
||||
,CODE_UNSUPPORTED_DATA_TYPE);
|
||||
|
||||
ERR_MISSING_SOURCE_KEY EXCEPTION;
|
||||
CODE_MISSING_SOURCE_KEY CONSTANT PLS_INTEGER := -20017;
|
||||
MSG_MISSING_SOURCE_KEY VARCHAR2(4000) := 'The Source was not found in parent table A_SOURCE';
|
||||
PRAGMA EXCEPTION_INIT( ERR_MISSING_SOURCE_KEY
|
||||
,CODE_MISSING_SOURCE_KEY);
|
||||
|
||||
ERR_NULL_SOURCE_FILE_CONFIG_KEY EXCEPTION;
|
||||
CODE_NULL_SOURCE_FILE_CONFIG_KEY CONSTANT PLS_INTEGER := -20018;
|
||||
MSG_NULL_SOURCE_FILE_CONFIG_KEY VARCHAR2(4000) := 'No entry in A_SOURCE_FILE_CONFIG table for specified A_SOURCE_FILE_CONFIG_KEY';
|
||||
PRAGMA EXCEPTION_INIT( ERR_NULL_SOURCE_FILE_CONFIG_KEY
|
||||
,CODE_NULL_SOURCE_FILE_CONFIG_KEY);
|
||||
|
||||
ERR_DUPLICATED_SOURCE_KEY EXCEPTION;
|
||||
CODE_DUPLICATED_SOURCE_KEY CONSTANT PLS_INTEGER := -20019;
|
||||
MSG_DUPLICATED_SOURCE_KEY VARCHAR2(4000) := 'The Source already exists in the A_SOURCE table';
|
||||
PRAGMA EXCEPTION_INIT( ERR_DUPLICATED_SOURCE_KEY
|
||||
,CODE_DUPLICATED_SOURCE_KEY);
|
||||
|
||||
ERR_MISSING_CONTAINER_CONFIG EXCEPTION;
|
||||
CODE_MISSING_CONTAINER_CONFIG CONSTANT PLS_INTEGER := -20020;
|
||||
MSG_MISSING_CONTAINER_CONFIG VARCHAR2(4000) := 'No match in A_SOURCE_FILE_CONFIG table where SOURCE_FILE_TYPE=''CONTAINER'' and specified SOURCE_FILE_ID';
|
||||
PRAGMA EXCEPTION_INIT( ERR_MISSING_CONTAINER_CONFIG
|
||||
,CODE_MISSING_CONTAINER_CONFIG);
|
||||
|
||||
ERR_MULTIPLE_CONTAINER_ENTRIES EXCEPTION;
|
||||
CODE_MULTIPLE_CONTAINER_ENTRIES CONSTANT PLS_INTEGER := -20021;
|
||||
MSG_MULTIPLE_CONTAINER_ENTRIES VARCHAR2(4000) := 'Multiple matches in A_SOURCE_FILE_CONFIG table where SOURCE_FILE_TYPE=''CONTAINER'' and specified SOURCE_FILE_ID';
|
||||
PRAGMA EXCEPTION_INIT( ERR_MULTIPLE_CONTAINER_ENTRIES
|
||||
,CODE_MULTIPLE_CONTAINER_ENTRIES);
|
||||
|
||||
ERR_WRONG_DESTINATION_PARAM EXCEPTION;
|
||||
CODE_WRONG_DESTINATION_PARAM CONSTANT PLS_INTEGER := -20022;
|
||||
MSG_WRONG_DESTINATION_PARAM VARCHAR2(4000) := 'Wrong destination parameter provided.';
|
||||
PRAGMA EXCEPTION_INIT( ERR_WRONG_DESTINATION_PARAM
|
||||
,CODE_WRONG_DESTINATION_PARAM);
|
||||
|
||||
ERR_FILE_NOT_EXISTS_ON_CLOUD EXCEPTION;
|
||||
CODE_FILE_NOT_EXISTS_ON_CLOUD CONSTANT PLS_INTEGER := -20023;
|
||||
MSG_FILE_NOT_EXISTS_ON_CLOUD VARCHAR2(4000) := 'File does not exist on the cloud.';
|
||||
PRAGMA EXCEPTION_INIT( ERR_FILE_NOT_EXISTS_ON_CLOUD
|
||||
,CODE_FILE_NOT_EXISTS_ON_CLOUD);
|
||||
|
||||
ERR_FILE_ALREADY_REGISTERED EXCEPTION;
|
||||
CODE_FILE_ALREADY_REGISTERED CONSTANT PLS_INTEGER := -20024;
|
||||
MSG_FILE_ALREADY_REGISTERED VARCHAR2(4000) := 'File already registered in A_SOURCE_FILE_RECEIVED table.';
|
||||
PRAGMA EXCEPTION_INIT( ERR_FILE_ALREADY_REGISTERED
|
||||
,CODE_FILE_ALREADY_REGISTERED);
|
||||
|
||||
ERR_WRONG_DATE_TIMESTAMP_FORMAT EXCEPTION;
|
||||
CODE_WRONG_DATE_TIMESTAMP_FORMAT CONSTANT PLS_INTEGER := -20025;
|
||||
MSG_WRONG_DATE_TIMESTAMP_FORMAT VARCHAR2(4000) := 'Provided DATE or TIMESTAMP format has errors (possible duplicated codes, ex: ''DD'').';
|
||||
PRAGMA EXCEPTION_INIT( ERR_WRONG_DATE_TIMESTAMP_FORMAT
|
||||
,CODE_WRONG_DATE_TIMESTAMP_FORMAT);
|
||||
|
||||
ERR_ENVIRONMENT_NOT_SET EXCEPTION;
|
||||
CODE_ENVIRONMENT_NOT_SET CONSTANT PLS_INTEGER := -20026;
|
||||
MSG_ENVIRONMENT_NOT_SET VARCHAR2(4000) := 'EnvironmentID not set'
|
||||
||cgBL||' Information about environment is needed to get proper configuration values.'
|
||||
||cgBL||' It can be set up in two different ways:'
|
||||
||cgBL||' 1. Set it on session level: execute DBMS_SESSION.SET_IDENTIFIER (client_id => ''dev'')'
|
||||
||cgBL||' 2. Set it on configuration level: Insert into CT_MRDS.A_FILE_MANAGER_CONFIG (ENVIRONMENT_ID,CONFIG_VARIABLE,CONFIG_VARIABLE_VALUE) values (''default'',''environment_id'',''dev'')'
|
||||
||cgBL||' Session level setup (1.) takes precedence over configuration level one (2.)'
|
||||
;
|
||||
PRAGMA EXCEPTION_INIT( ERR_ENVIRONMENT_NOT_SET
|
||||
,CODE_ENVIRONMENT_NOT_SET);
|
||||
|
||||
|
||||
ERR_CONFIG_VARIABLE_NOT_SET EXCEPTION;
|
||||
CODE_CONFIG_VARIABLE_NOT_SET CONSTANT PLS_INTEGER := -20027;
|
||||
MSG_CONFIG_VARIABLE_NOT_SET VARCHAR2(4000) := 'Missing configuration value in A_FILE_MANAGER_CONFIG';
|
||||
PRAGMA EXCEPTION_INIT( ERR_CONFIG_VARIABLE_NOT_SET
|
||||
,CODE_CONFIG_VARIABLE_NOT_SET);
|
||||
|
||||
ERR_NOT_INPUT_SOURCE_FILE_TYPE EXCEPTION;
|
||||
CODE_NOT_INPUT_SOURCE_FILE_TYPE CONSTANT PLS_INTEGER := -20028;
|
||||
MSG_NOT_INPUT_SOURCE_FILE_TYPE VARCHAR2(4000) := 'Archival can be executed only for A_SOURCE_FILE_CONFIG_KEY where SOURCE_FILE_TYPE=''INPUT''';
|
||||
PRAGMA EXCEPTION_INIT( ERR_NOT_INPUT_SOURCE_FILE_TYPE
|
||||
,CODE_NOT_INPUT_SOURCE_FILE_TYPE);
|
||||
|
||||
ERR_EXP_DATA_FOR_ARCH_FAILED EXCEPTION;
|
||||
CODE_EXP_DATA_FOR_ARCH_FAILED CONSTANT PLS_INTEGER := -20029;
|
||||
MSG_EXP_DATA_FOR_ARCH_FAILED VARCHAR2(4000) := 'Export data for archival failed.';
|
||||
PRAGMA EXCEPTION_INIT( ERR_EXP_DATA_FOR_ARCH_FAILED
|
||||
,CODE_EXP_DATA_FOR_ARCH_FAILED);
|
||||
|
||||
ERR_RESTORE_FILE_FROM_TRASH EXCEPTION;
|
||||
CODE_RESTORE_FILE_FROM_TRASH CONSTANT PLS_INTEGER := -20030;
|
||||
MSG_RESTORE_FILE_FROM_TRASH VARCHAR2(4000) := 'Unexpected issues occurred during the archival process. Restoration of exported files failed.';
|
||||
PRAGMA EXCEPTION_INIT( ERR_RESTORE_FILE_FROM_TRASH
|
||||
,CODE_RESTORE_FILE_FROM_TRASH);
|
||||
|
||||
ERR_CHANGE_STAT_TO_ARCHIVED_FAILED EXCEPTION;
|
||||
CODE_CHANGE_STAT_TO_ARCHIVED_FAILED CONSTANT PLS_INTEGER := -20031;
|
||||
MSG_CHANGE_STAT_TO_ARCHIVED_FAILED VARCHAR2(4000) := 'Failed to change file status to: ARCHIVED in A_SOURCE_FILE_RECEIVED table.';
|
||||
PRAGMA EXCEPTION_INIT( ERR_CHANGE_STAT_TO_ARCHIVED_FAILED
|
||||
,CODE_CHANGE_STAT_TO_ARCHIVED_FAILED);
|
||||
|
||||
ERR_MOVE_FILE_TO_TRASH_FAILED EXCEPTION;
|
||||
CODE_MOVE_FILE_TO_TRASH_FAILED CONSTANT PLS_INTEGER := -20032;
|
||||
MSG_MOVE_FILE_TO_TRASH_FAILED VARCHAR2(4000) := 'FAILED to move file to TRASH before DROPPING it.';
|
||||
PRAGMA EXCEPTION_INIT( ERR_MOVE_FILE_TO_TRASH_FAILED
|
||||
,CODE_MOVE_FILE_TO_TRASH_FAILED);
|
||||
|
||||
ERR_DROP_EXPORTED_FILES_FAILED EXCEPTION;
|
||||
CODE_DROP_EXPORTED_FILES_FAILED CONSTANT PLS_INTEGER := -20033;
|
||||
MSG_DROP_EXPORTED_FILES_FAILED VARCHAR2(4000) := 'FAILED to DROP exported files after moving them to TRASH.';
|
||||
PRAGMA EXCEPTION_INIT( ERR_DROP_EXPORTED_FILES_FAILED
|
||||
,CODE_DROP_EXPORTED_FILES_FAILED);
|
||||
|
||||
ERR_INVALID_BUCKET_AREA EXCEPTION;
|
||||
CODE_INVALID_BUCKET_AREA CONSTANT PLS_INTEGER := -20034;
|
||||
MSG_INVALID_BUCKET_AREA VARCHAR2(4000) := 'Invalid bucket area specified. Valid values: INBOX, ODS, DATA, ARCHIVE';
|
||||
PRAGMA EXCEPTION_INIT( ERR_INVALID_BUCKET_AREA
|
||||
,CODE_INVALID_BUCKET_AREA);
|
||||
|
||||
ERR_INVALID_PARALLEL_DEGREE EXCEPTION;
|
||||
CODE_INVALID_PARALLEL_DEGREE CONSTANT PLS_INTEGER := -20110;
|
||||
MSG_INVALID_PARALLEL_DEGREE VARCHAR2(4000) := 'Invalid parallel degree parameter. Must be between 1 and 16';
|
||||
PRAGMA EXCEPTION_INIT( ERR_INVALID_PARALLEL_DEGREE
|
||||
,CODE_INVALID_PARALLEL_DEGREE);
|
||||
|
||||
ERR_PARALLEL_EXECUTION_FAILED EXCEPTION;
|
||||
CODE_PARALLEL_EXECUTION_FAILED CONSTANT PLS_INTEGER := -20111;
|
||||
MSG_PARALLEL_EXECUTION_FAILED VARCHAR2(4000) := 'Parallel execution failed';
|
||||
PRAGMA EXCEPTION_INIT( ERR_PARALLEL_EXECUTION_FAILED
|
||||
,CODE_PARALLEL_EXECUTION_FAILED);
|
||||
|
||||
ERR_UNKNOWN EXCEPTION;
|
||||
CODE_UNKNOWN CONSTANT PLS_INTEGER := -20999;
|
||||
MSG_UNKNOWN VARCHAR2(4000) := 'Unknown Error Occurred';
|
||||
PRAGMA EXCEPTION_INIT( ERR_UNKNOWN
|
||||
,CODE_UNKNOWN);
|
||||
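-- Added illustration (assumption, not in the original source): a minimal sketch of how a
-- caller can react to one of the named exceptions declared above, since PRAGMA
-- EXCEPTION_INIT binds each of them to its ORA error code. GET_ERROR_MESSAGE is declared
-- further down in this specification.
--
--   begin
--     null;  -- call a routine here (e.g. in FILE_MANAGER or DATA_EXPORTER) that may raise these errors
--   exception
--     when ENV_MANAGER.ERR_FILE_NOT_FOUND_ON_CLOUD then
--       dbms_output.put_line(
--         ENV_MANAGER.GET_ERROR_MESSAGE(pCode => ENV_MANAGER.CODE_FILE_NOT_FOUND_ON_CLOUD));
--     when others then
--       raise;
--   end;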
|
||||
---------------------------------------------------------------------------------------------------------------------------
|
||||
---------------------------------------------------------------------------------------------------------------------------
|
||||
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* @name LOG_PROCESS_EVENT
|
||||
* @desc Insert a new log record into A_PROCESS_LOG table.
|
||||
* Also outputs to console if gvConsoleLoggingEnabled = 'ON'.
|
||||
* Respects logging level configuration (gvMinLogLevel).
|
||||
* @example ENV_MANAGER.LOG_PROCESS_EVENT('Process completed successfully', 'INFO', 'pParam1=value1');
|
||||
* @ex_rslt Record inserted into A_PROCESS_LOG table and optionally displayed in console output
|
||||
**/
|
||||
PROCEDURE LOG_PROCESS_EVENT (
|
||||
pLogMessage VARCHAR2
|
||||
,pLogLevel VARCHAR2 DEFAULT 'ERROR'
|
||||
,pParameters VARCHAR2 DEFAULT NULL
|
||||
,pProcessName VARCHAR2 DEFAULT 'FILE_MANAGER'
|
||||
);
|
||||
|
||||
/**
|
||||
* @name LOG_PROCESS_ERROR
|
||||
* @desc Insert a detailed error record into A_PROCESS_LOG table with full stack trace, backtrace, and call stack.
|
||||
* This procedure captures comprehensive error information for debugging purposes while
|
||||
* allowing clean user-facing error messages to be raised separately.
|
||||
* @param pLogMessage - Base error message description
|
||||
* @param pParameters - Procedure parameters for context
|
||||
* @param pProcessName - Name of the calling process/package
|
||||
* @ex_rslt Record inserted into A_PROCESS_LOG table with complete error stack information
|
||||
*/
|
||||
PROCEDURE LOG_PROCESS_ERROR (
|
||||
pLogMessage VARCHAR2
|
||||
,pParameters VARCHAR2 DEFAULT NULL
|
||||
,pProcessName VARCHAR2 DEFAULT 'FILE_MANAGER'
|
||||
);
|
||||
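-- Added illustration (assumption, not part of the original source): a typical exception
-- handler fragment combining FORMAT_PARAMETERS (declared below) with LOG_PROCESS_ERROR.
-- The message text and parameter values are hypothetical.
--
--   exception
--     when others then
--       ENV_MANAGER.LOG_PROCESS_ERROR(
--         pLogMessage  => 'Export of MY_TABLE failed',
--         pParameters  => ENV_MANAGER.FORMAT_PARAMETERS(
--                           SYS.ODCIVARCHAR2LIST('pTableName=MY_TABLE', 'pBucketArea=DATA')),
--         pProcessName => 'DATA_EXPORTER');
--       raise;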
|
||||
/**
|
||||
* @name INIT_ERRORS
|
||||
* @desc Loads data into Errors array.
|
||||
* Errors array is a list of Record(Error_Code, Error_Message) indexed by Error_Code.
|
||||
* Called automatically during package initialization.
|
||||
* @example Called automatically when package is first referenced
|
||||
* @ex_rslt Errors array populated with all error codes and messages
|
||||
**/
|
||||
PROCEDURE INIT_ERRORS;
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* @name GET_DEFAULT_ENV
|
||||
* @desc Returns a string with the name of the default environment.
|
||||
* The returned string is the A_FILE_MANAGER_CONFIG.ENVIRONMENT_ID value.
|
||||
* @example select ENV_MANAGER.GET_DEFAULT_ENV() from dual;
|
||||
* @ex_rslt dev
|
||||
**/
|
||||
FUNCTION GET_DEFAULT_ENV
|
||||
RETURN VARCHAR2;
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* @name INIT_VARIABLES
|
||||
* @desc For specified pEnv parameter (A_FILE_MANAGER_CONFIG.ENVIRONMENT_ID)
|
||||
* Assign values to following global package variables:
|
||||
* - gvNameSpace
|
||||
* - gvRegion
|
||||
* - gvCredentialName
|
||||
* - gvInboxBucketName
|
||||
* - gvDataBucketName
|
||||
* - gvArchiveBucketName
|
||||
* - gvInboxBucketUri
|
||||
* - gvDataBucketUri
|
||||
* - gvArchiveBucketUri
|
||||
* - gvLoggingEnabled
|
||||
* - gvMinLogLevel
|
||||
* - gvDefaultDateFormat
|
||||
* - gvConsoleLoggingEnabled
|
||||
**/
|
||||
PROCEDURE INIT_VARIABLES(
|
||||
pEnv VARCHAR2
|
||||
);
|
||||
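-- Added illustration (assumption, not in the original source): after initialising for an
-- environment, the global bucket variables listed above become usable; 'dev' matches the
-- environment id shown in the GET_DEFAULT_ENV example.
--
--   begin
--     ENV_MANAGER.INIT_VARIABLES(pEnv => 'dev');
--     dbms_output.put_line(ENV_MANAGER.gvArchiveBucketUri);
--   end;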
|
||||
|
||||
|
||||
/**
|
||||
* @name GET_ERROR_MESSAGE
|
||||
* @desc Returns a string with the error message for the specified pCode (Error_Code).
|
||||
* The error message is taken from the Errors array loaded by the INIT_ERRORS procedure.
|
||||
* @example select ENV_MANAGER.GET_ERROR_MESSAGE(pCode => -20009) from dual;
|
||||
* @ex_rslt File not found on the cloud
|
||||
**/
|
||||
FUNCTION GET_ERROR_MESSAGE(
|
||||
pCode PLS_INTEGER
|
||||
) RETURN VARCHAR2;
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* @name GET_ERROR_STACK
|
||||
* @desc Returns a string with the full available error stack information.
|
||||
* The error message is taken from the Errors array loaded by the INIT_ERRORS procedure.
|
||||
* @example
|
||||
* select ENV_MANAGER.GET_ERROR_STACK(
|
||||
* pFormat => 'OUTPUT'
|
||||
* ,pCode => -20009
|
||||
* ,pSourceFileReceivedKey => NULL)
|
||||
* from dual
|
||||
* @ex_rslt
|
||||
* ------------------------------------------------------+
|
||||
* Error Message:
|
||||
* ORA-0000: normal, successful completion
|
||||
* -------------------------------------------------------
|
||||
* Error Stack:
|
||||
* -------------------------------------------------------
|
||||
* Error Backtrace:
|
||||
* ------------------------------------------------------+
|
||||
**/
|
||||
FUNCTION GET_ERROR_STACK(
|
||||
pFormat VARCHAR2
|
||||
,pCode PLS_INTEGER
|
||||
,pSourceFileReceivedKey CT_MRDS.A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY%TYPE DEFAULT NULL
|
||||
) RETURN VARCHAR2;
|
||||
|
||||
/**
|
||||
* @name FORMAT_PARAMETERS
|
||||
* @desc Formats parameter list for logging purposes.
|
||||
* Converts SYS.ODCIVARCHAR2LIST to formatted string with proper NULL handling.
|
||||
* @example select ENV_MANAGER.FORMAT_PARAMETERS(SYS.ODCIVARCHAR2LIST('param1=value1', 'param2=NULL')) from dual;
|
||||
* @ex_rslt param1=value1 ,
|
||||
* param2=NULL
|
||||
**/
|
||||
FUNCTION FORMAT_PARAMETERS(
|
||||
pParameterList SYS.ODCIVARCHAR2LIST
|
||||
) RETURN VARCHAR2;
|
||||
|
||||
/**
|
||||
* @name ANALYZE_VALIDATION_ERRORS
|
||||
* @desc Analyzes CSV validation errors and generates detailed diagnostic report.
|
||||
* Compares CSV structure with template table and provides specific error analysis.
|
||||
* Includes suggested solutions for common validation issues.
|
||||
* @param pValidationLogTable - Name of validation log table (e.g., VALIDATE$242_LOG)
|
||||
* @param pTemplateSchema - Schema of template table (e.g., CT_ET_TEMPLATES)
|
||||
* @param pTemplateTable - Name of template table (e.g., MOCK_PROC_TABLE)
|
||||
* @param pCsvFileUri - URI of CSV file being validated
|
||||
* @example SELECT ENV_MANAGER.ANALYZE_VALIDATION_ERRORS('VALIDATE$242_LOG', 'CT_ET_TEMPLATES', 'MOCK_PROC_TABLE', 'https://...') FROM DUAL;
|
||||
* @ex_rslt Detailed validation analysis report with column mismatches and solutions
|
||||
**/
|
||||
FUNCTION ANALYZE_VALIDATION_ERRORS(
|
||||
pValidationLogTable VARCHAR2,
|
||||
pTemplateSchema VARCHAR2,
|
||||
pTemplateTable VARCHAR2,
|
||||
pCsvFileUri VARCHAR2
|
||||
) RETURN VARCHAR2;
|
||||
|
||||
---------------------------------------------------------------------------------------------------------------------------
|
||||
-- PACKAGE VERSION MANAGEMENT FUNCTIONS
|
||||
---------------------------------------------------------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* @name GET_VERSION
|
||||
* @desc Returns the current version number of the ENV_MANAGER package.
|
||||
* Uses semantic versioning format (MAJOR.MINOR.PATCH).
|
||||
* @example SELECT ENV_MANAGER.GET_VERSION() FROM DUAL;
|
||||
* @ex_rslt 3.0.0
|
||||
**/
|
||||
FUNCTION GET_VERSION RETURN VARCHAR2;
|
||||
|
||||
/**
|
||||
* @name GET_BUILD_INFO
|
||||
* @desc Returns comprehensive build information including version, build date, and author.
|
||||
* Formatted for display in logs or monitoring systems.
|
||||
* @example SELECT ENV_MANAGER.GET_BUILD_INFO() FROM DUAL;
|
||||
* @ex_rslt Package: ENV_MANAGER
|
||||
* Version: 3.0.0
|
||||
* Build Date: 2025-10-22 16:00:00
|
||||
* Author: Grzegorz Michalski
|
||||
**/
|
||||
FUNCTION GET_BUILD_INFO RETURN VARCHAR2;
|
||||
|
||||
/**
|
||||
* @name GET_VERSION_HISTORY
|
||||
* @desc Returns complete version history with all releases and changes.
|
||||
* Shows evolution of package features over time.
|
||||
* @example SELECT ENV_MANAGER.GET_VERSION_HISTORY() FROM DUAL;
|
||||
* @ex_rslt ENV_MANAGER Version History:
|
||||
* 3.0.0 (2025-10-22): Added package versioning system...
|
||||
* 2.1.0 (2025-10-15): Added ANALYZE_VALIDATION_ERRORS function...
|
||||
**/
|
||||
FUNCTION GET_VERSION_HISTORY RETURN VARCHAR2;
|
||||
|
||||
/**
|
||||
* @name GET_PACKAGE_VERSION_INFO
|
||||
* @desc Universal function to get formatted version information for any package.
|
||||
* This centralized function is used by all packages in the system.
|
||||
* @param pPackageName - Name of the package
|
||||
* @param pVersion - Version string (MAJOR.MINOR.PATCH format)
|
||||
* @param pBuildDate - Build date timestamp
|
||||
* @param pAuthor - Package author name
|
||||
* @example SELECT ENV_MANAGER.GET_PACKAGE_VERSION_INFO('FILE_MANAGER', '2.1.0', '2025-10-22 15:00:00', 'Grzegorz Michalski') FROM DUAL;
|
||||
* @ex_rslt Package: FILE_MANAGER
|
||||
* Version: 2.1.0
|
||||
* Build Date: 2025-10-22 15:00:00
|
||||
* Author: Grzegorz Michalski
|
||||
**/
|
||||
FUNCTION GET_PACKAGE_VERSION_INFO(
|
||||
pPackageName VARCHAR2,
|
||||
pVersion VARCHAR2,
|
||||
pBuildDate VARCHAR2,
|
||||
pAuthor VARCHAR2
|
||||
) RETURN VARCHAR2;
|
||||
|
||||
/**
|
||||
* @name FORMAT_VERSION_HISTORY
|
||||
* @desc Universal function to format version history for any package.
|
||||
* Adds package name header and proper formatting.
|
||||
* @param pPackageName - Name of the package
|
||||
* @param pVersionHistory - Complete version history text
|
||||
* @example SELECT ENV_MANAGER.FORMAT_VERSION_HISTORY('FILE_MANAGER', '2.1.0 (2025-10-22): Export procedures...') FROM DUAL;
|
||||
* @ex_rslt FILE_MANAGER Version History:
|
||||
* 2.1.0 (2025-10-22): Export procedures...
|
||||
**/
|
||||
FUNCTION FORMAT_VERSION_HISTORY(
|
||||
pPackageName VARCHAR2,
|
||||
pVersionHistory VARCHAR2
|
||||
) RETURN VARCHAR2;
|
||||
|
||||
---------------------------------------------------------------------------------------------------------------------------
|
||||
-- PACKAGE HASH + CHANGE DETECTION FUNCTIONS
|
||||
---------------------------------------------------------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* @name CALCULATE_PACKAGE_HASH
|
||||
* @desc Calculates SHA256 hash of package source code from ALL_SOURCE.
|
||||
* Returns hash for both SPEC and BODY (if exists).
|
||||
* Used for automatic change detection.
|
||||
* @param pPackageOwner - Schema owner of the package
|
||||
* @param pPackageName - Name of the package
|
||||
* @param pPackageType - Type of package code ('PACKAGE' for SPEC, 'PACKAGE BODY' for BODY)
|
||||
* @example SELECT ENV_MANAGER.CALCULATE_PACKAGE_HASH('CT_MRDS', 'FILE_MANAGER', 'PACKAGE') FROM DUAL;
|
||||
* @ex_rslt A7B3C5D9E8F1234567890ABCDEF... (64-character SHA256 hash)
|
||||
**/
|
||||
FUNCTION CALCULATE_PACKAGE_HASH(
|
||||
pPackageOwner VARCHAR2,
|
||||
pPackageName VARCHAR2,
|
||||
pPackageType VARCHAR2 -- 'PACKAGE' or 'PACKAGE BODY'
|
||||
) RETURN VARCHAR2;
|
||||
|
||||
/**
|
||||
* @name TRACK_PACKAGE_VERSION
|
||||
* @desc Records package version and source code hash in A_PACKAGE_VERSION_TRACKING table.
|
||||
* Automatically detects if source code changed without version update.
|
||||
* Should be called after every package deployment.
|
||||
* @param pPackageOwner - Schema owner of the package
|
||||
* @param pPackageName - Name of the package
|
||||
* @param pPackageVersion - Current version from PACKAGE_VERSION constant
|
||||
* @param pPackageBuildDate - Build date from PACKAGE_BUILD_DATE constant
|
||||
* @param pPackageAuthor - Author from PACKAGE_AUTHOR constant
|
||||
* @example EXEC ENV_MANAGER.TRACK_PACKAGE_VERSION('CT_MRDS', 'FILE_MANAGER', '3.2.0', '2025-10-22 16:30:00', 'Grzegorz Michalski');
|
||||
* @ex_rslt Record inserted into A_PACKAGE_VERSION_TRACKING with change detection status
|
||||
**/
|
||||
PROCEDURE TRACK_PACKAGE_VERSION(
|
||||
pPackageOwner VARCHAR2,
|
||||
pPackageName VARCHAR2,
|
||||
pPackageVersion VARCHAR2,
|
||||
pPackageBuildDate VARCHAR2,
|
||||
pPackageAuthor VARCHAR2
|
||||
);
|
||||
|
||||
/**
|
||||
* @name CHECK_PACKAGE_CHANGES
|
||||
* @desc Checks if package source code has changed since last tracking.
|
||||
* Compares current hash with last recorded hash in A_PACKAGE_VERSION_TRACKING.
|
||||
* Returns detailed change detection report.
|
||||
* @param pPackageOwner - Schema owner of the package
|
||||
* @param pPackageName - Name of the package
|
||||
* @example SELECT ENV_MANAGER.CHECK_PACKAGE_CHANGES('CT_MRDS', 'FILE_MANAGER') FROM DUAL;
|
||||
* @ex_rslt WARNING: Package changed without version update!
|
||||
* Last Version: 3.2.0
|
||||
* Current Hash (SPEC): A7B3C5D9...
|
||||
* Last Hash (SPEC): B8C4D6E0...
|
||||
* RECOMMENDATION: Update PACKAGE_VERSION and PACKAGE_BUILD_DATE
|
||||
**/
|
||||
FUNCTION CHECK_PACKAGE_CHANGES(
|
||||
pPackageOwner VARCHAR2,
|
||||
pPackageName VARCHAR2
|
||||
) RETURN VARCHAR2;
|
||||
|
||||
/**
|
||||
* @name GET_PACKAGE_HASH_INFO
|
||||
* @desc Returns formatted information about package hash and tracking history.
|
||||
* Includes current hash, last tracked hash, and change detection status.
|
||||
* @param pPackageOwner - Schema owner of the package
|
||||
* @param pPackageName - Name of the package
|
||||
* @example SELECT ENV_MANAGER.GET_PACKAGE_HASH_INFO('CT_MRDS', 'FILE_MANAGER') FROM DUAL;
|
||||
* @ex_rslt Package: CT_MRDS.FILE_MANAGER
|
||||
* Current Version: 3.2.0
|
||||
* Current Hash (SPEC): A7B3C5D9...
|
||||
* Last Tracked: 2025-10-22 16:30:00
|
||||
* Status: OK - No changes detected
|
||||
**/
|
||||
FUNCTION GET_PACKAGE_HASH_INFO(
|
||||
pPackageOwner VARCHAR2,
|
||||
pPackageName VARCHAR2
|
||||
) RETURN VARCHAR2;
|
||||
|
||||
END ENV_MANAGER;
|
||||
/
|
||||
File diff suppressed because it is too large
@@ -0,0 +1,239 @@
|
||||
create or replace PACKAGE CT_MRDS.DATA_EXPORTER
|
||||
AUTHID CURRENT_USER
|
||||
AS
|
||||
/**
|
||||
* Data Export Package: Provides comprehensive data export capabilities to various formats (CSV, Parquet)
|
||||
* with support for cloud storage integration via Oracle Cloud Infrastructure (OCI).
|
||||
* The structure of this comment is used by the GET_PACKAGE_DOCUMENTATION function,
|
||||
* which returns documentation text ready to copy-paste into the Confluence page.
|
||||
**/
|
||||
|
||||
-- Package Version Information
|
||||
PACKAGE_VERSION CONSTANT VARCHAR2(10) := '2.9.0';
|
||||
PACKAGE_BUILD_DATE CONSTANT VARCHAR2(20) := '2026-02-13 14:00:00';
|
||||
PACKAGE_AUTHOR CONSTANT VARCHAR2(100) := 'Grzegorz Michalski';
|
||||
|
||||
-- Version History (last 3-5 changes)
|
||||
VERSION_HISTORY CONSTANT VARCHAR2(4000) :=
|
||||
'v2.9.0 (2026-02-13): Added pProcessName parameter to EXPORT_TABLE_DATA and EXPORT_TABLE_DATA_TO_CSV_BY_DATE procedures for process tracking in A_SOURCE_FILE_RECEIVED table.' || CHR(10) ||
|
||||
'v2.8.1 (2026-02-12): FIX query in EXPORT_TABLE_DATA - removed A_LOAD_HISTORY join to ensure single file output (simple SELECT).' || CHR(10) ||
|
||||
'v2.8.0 (2026-02-12): MAJOR REFACTOR - EXPORT_TABLE_DATA now exports to single CSV file instead of partitioning by key values. Added pFileName parameter.' || CHR(10) ||
|
||||
'v2.7.5 (2026-02-11): Added pRegisterExport parameter to EXPORT_TABLE_DATA procedure. When TRUE, registers each exported CSV file in A_SOURCE_FILE_RECEIVED.' || CHR(10) ||
|
||||
'v2.7.4 (2026-02-11): ACTUAL FILENAME STORAGE - Store real filename with Oracle suffix in SOURCE_FILE_NAME instead of theoretical filename.' || CHR(10);
|
||||
|
||||
cgBL CONSTANT VARCHAR2(2) := CHR(13)||CHR(10);
|
||||
vgMsgTmp VARCHAR2(32000);
|
||||
|
||||
---------------------------------------------------------------------------------------------------------------------------
|
||||
-- TYPE DEFINITIONS FOR PARTITION HANDLING
|
||||
---------------------------------------------------------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Record type for year/month partition information
|
||||
**/
|
||||
TYPE partition_rec IS RECORD (
|
||||
year VARCHAR2(4),
|
||||
month VARCHAR2(2)
|
||||
);
|
||||
|
||||
/**
|
||||
* Table type for collection of partition records
|
||||
**/
|
||||
TYPE partition_tab IS TABLE OF partition_rec;
|
||||
|
||||
---------------------------------------------------------------------------------------------------------------------------
|
||||
-- INTERNAL PARALLEL PROCESSING CALLBACK
|
||||
---------------------------------------------------------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* @name EXPORT_PARTITION_PARALLEL
|
||||
* @desc Internal callback procedure for DBMS_PARALLEL_EXECUTE.
|
||||
* Processes single partition (year/month) chunk in parallel task.
|
||||
* Called by DBMS_PARALLEL_EXECUTE framework for each chunk.
|
||||
* This procedure is PUBLIC because DBMS_PARALLEL_EXECUTE requires it,
|
||||
* but should NOT be called directly by external code.
|
||||
* @param pStartId - Chunk start ID (CHUNK_ID from A_PARALLEL_EXPORT_CHUNKS table)
|
||||
* @param pEndId - Chunk end ID (same as pStartId for single-row chunks)
|
||||
**/
|
||||
PROCEDURE EXPORT_PARTITION_PARALLEL (
|
||||
pStartId IN NUMBER,
|
||||
pEndId IN NUMBER
|
||||
);
|
||||
|
||||
---------------------------------------------------------------------------------------------------------------------------
|
||||
-- MAIN EXPORT PROCEDURES
|
||||
---------------------------------------------------------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* @name EXPORT_TABLE_DATA
|
||||
* @desc Wrapper procedure for DBMS_CLOUD.EXPORT_DATA.
|
||||
* Exports data into a single CSV file on OCI infrastructure.
|
||||
* pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
|
||||
* Supports template table for column order and per-column date formatting.
|
||||
* When pRegisterExport=TRUE, successfully exported file is registered in:
|
||||
* - CT_MRDS.A_SOURCE_FILE_RECEIVED (tracks file location, size, checksum, and metadata)
|
||||
* @param pFileName - Optional filename (e.g., 'export.csv'). NULL = auto-generate from table name
|
||||
* @param pTemplateTableName - Optional template table (SCHEMA.TABLE or TABLE) for:
|
||||
* - Column order control (template defines CSV structure)
|
||||
* - Per-column date formatting via FILE_MANAGER.GET_DATE_FORMAT
|
||||
* - NULL = use source table columns in natural order
|
||||
* @param pMaxFileSize - Maximum file size in bytes (default 104857600 = 100MB, min 10MB, max 1GB)
|
||||
* @param pRegisterExport - When TRUE, registers exported CSV file in A_SOURCE_FILE_RECEIVED table
|
||||
* @param pProcessName - Process name stored in PROCESS_NAME column (default 'DATA_EXPORTER')
|
||||
* @example
|
||||
* begin
|
||||
* DATA_EXPORTER.EXPORT_TABLE_DATA(
|
||||
* pSchemaName => 'CT_MRDS',
|
||||
* pTableName => 'MY_TABLE',
|
||||
* pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
||||
* pBucketArea => 'DATA',
|
||||
* pFolderName => 'csv_exports',
|
||||
* pFileName => 'my_export.csv', -- Optional
|
||||
* pTemplateTableName => 'CT_ET_TEMPLATES.MY_TEMPLATE', -- Optional
|
||||
* pMaxFileSize => 104857600, -- Optional, default 100MB
|
||||
* pRegisterExport => TRUE -- Optional, default FALSE
|
||||
* );
|
||||
* end;
|
||||
**/
|
||||
PROCEDURE EXPORT_TABLE_DATA (
|
||||
pSchemaName IN VARCHAR2,
|
||||
pTableName IN VARCHAR2,
|
||||
pKeyColumnName IN VARCHAR2,
|
||||
pBucketArea IN VARCHAR2,
|
||||
pFolderName IN VARCHAR2,
|
||||
pFileName IN VARCHAR2 default NULL,
|
||||
pTemplateTableName IN VARCHAR2 default NULL,
|
||||
pMaxFileSize IN NUMBER default 104857600,
|
||||
pRegisterExport IN BOOLEAN default FALSE,
|
||||
pProcessName IN VARCHAR2 default 'DATA_EXPORTER',
|
||||
pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
|
||||
);
|
||||
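-- Added illustration (assumption, not part of the original source): the same call as in the
-- @example above, but also setting the pProcessName parameter introduced in v2.9.0 so the
-- registration row in A_SOURCE_FILE_RECEIVED carries a caller-specific PROCESS_NAME.
--
--   begin
--     DATA_EXPORTER.EXPORT_TABLE_DATA(
--       pSchemaName     => 'CT_MRDS',
--       pTableName      => 'MY_TABLE',
--       pKeyColumnName  => 'A_ETL_LOAD_SET_KEY_FK',
--       pBucketArea     => 'ARCHIVE',
--       pFolderName     => 'csv_exports',
--       pRegisterExport => TRUE,
--       pProcessName    => 'MARS_ARCHIVAL'    -- hypothetical process name
--     );
--   end;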
|
||||
|
||||
|
||||
/**
|
||||
* @name EXPORT_TABLE_DATA_BY_DATE
|
||||
* @desc Wrapper procedure for DBMS_CLOUD.EXPORT_DATA.
|
||||
* Exports data into PARQUET files on OCI infrastructure.
|
||||
* Each YEAR_MONTH pair goes to a separate file (implicit partitioning).
|
||||
* Allows specifying custom column list or uses T.* if pColumnList is NULL.
|
||||
* Validates that all columns in pColumnList exist in the target table.
|
||||
* Automatically adds 'T.' prefix to column names in pColumnList.
|
||||
* Supports parallel partition processing via pParallelDegree parameter (default 1, range 1-16).
|
||||
* pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
|
||||
* @example
|
||||
* begin
|
||||
* DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
|
||||
* pSchemaName => 'CT_MRDS',
|
||||
* pTableName => 'MY_TABLE',
|
||||
* pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
||||
* pBucketArea => 'DATA',
|
||||
* pFolderName => 'parquet_exports',
|
||||
* pColumnList => 'COLUMN1, COLUMN2, COLUMN3', -- Optional
|
||||
* pMinDate => DATE '2024-01-01',
|
||||
* pMaxDate => SYSDATE,
|
||||
* pParallelDegree => 8 -- Optional, default 1, range 1-16
|
||||
* );
|
||||
* end;
|
||||
**/
|
||||
PROCEDURE EXPORT_TABLE_DATA_BY_DATE (
|
||||
pSchemaName IN VARCHAR2,
|
||||
pTableName IN VARCHAR2,
|
||||
pKeyColumnName IN VARCHAR2,
|
||||
pBucketArea IN VARCHAR2,
|
||||
pFolderName IN VARCHAR2,
|
||||
pColumnList IN VARCHAR2 default NULL,
|
||||
pMinDate IN DATE default DATE '1900-01-01',
|
||||
pMaxDate IN DATE default SYSDATE,
|
||||
pParallelDegree IN NUMBER default 1,
|
||||
pTemplateTableName IN VARCHAR2 default NULL,
|
||||
pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
|
||||
);
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* @name EXPORT_TABLE_DATA_TO_CSV_BY_DATE
|
||||
* @desc Exports data to separate CSV files partitioned by year and month.
|
||||
* Creates one CSV file for each year/month combination found in the data.
|
||||
* Uses the same date filtering mechanism with CT_ODS.A_LOAD_HISTORY as EXPORT_TABLE_DATA_BY_DATE,
|
||||
* but exports to CSV format instead of Parquet.
|
||||
* Supports parallel partition processing via pParallelDegree parameter (1-16).
|
||||
* File naming pattern: {pFileName}_YYYYMM.csv or {TABLENAME}_YYYYMM.csv (if pFileName is NULL)
|
||||
* When pRegisterExport=TRUE, successfully exported files are registered in:
|
||||
* - CT_MRDS.A_SOURCE_FILE_RECEIVED (tracks file location, size, checksum, and metadata)
|
||||
* @param pProcessName - Process name stored in PROCESS_NAME column (default 'DATA_EXPORTER')
|
||||
* @example
|
||||
* begin
|
||||
* -- With custom filename
|
||||
* DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
|
||||
* pSchemaName => 'CT_MRDS',
|
||||
* pTableName => 'MY_TABLE',
|
||||
* pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
||||
* pBucketArea => 'DATA',
|
||||
* pFolderName => 'exports',
|
||||
* pFileName => 'my_export.csv',
|
||||
* pMinDate => DATE '2024-01-01',
|
||||
* pMaxDate => SYSDATE,
|
||||
* pParallelDegree => 8, -- Optional, default 1, range 1-16
|
||||
* pRegisterExport => TRUE -- Optional, default FALSE, registers to A_SOURCE_FILE_RECEIVED
|
||||
* );
|
||||
*
|
||||
* -- With auto-generated filename (based on table name only)
|
||||
* DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
|
||||
* pSchemaName => 'OU_TOP',
|
||||
* pTableName => 'AGGREGATED_ALLOTMENT',
|
||||
* pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
||||
* pBucketArea => 'ARCHIVE',
|
||||
* pFolderName => 'exports',
|
||||
* pMinDate => DATE '2025-09-01',
|
||||
* pMaxDate => DATE '2025-09-17',
|
||||
* pRegisterExport => TRUE -- Registers each export to A_SOURCE_FILE_RECEIVED table
|
||||
* );
|
||||
* -- This will create files like: AGGREGATED_ALLOTMENT_202509.csv, etc.
|
||||
* pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
|
||||
* end;
|
||||
**/
|
||||
PROCEDURE EXPORT_TABLE_DATA_TO_CSV_BY_DATE (
|
||||
pSchemaName IN VARCHAR2,
|
||||
pTableName IN VARCHAR2,
|
||||
pKeyColumnName IN VARCHAR2,
|
||||
pBucketArea IN VARCHAR2,
|
||||
pFolderName IN VARCHAR2,
|
||||
pFileName IN VARCHAR2 DEFAULT NULL,
|
||||
pColumnList IN VARCHAR2 default NULL,
|
||||
pMinDate IN DATE default DATE '1900-01-01',
|
||||
pMaxDate IN DATE default SYSDATE,
|
||||
pParallelDegree IN NUMBER default 1,
|
||||
pTemplateTableName IN VARCHAR2 default NULL,
|
||||
pMaxFileSize IN NUMBER default 104857600,
|
||||
pRegisterExport IN BOOLEAN default FALSE,
|
||||
pProcessName IN VARCHAR2 default 'DATA_EXPORTER',
|
||||
pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
|
||||
);
|
||||
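-- Added illustration (assumption, not part of the original source): a by-date CSV export
-- that also sets pProcessName (new in v2.9.0); other values follow the @example above.
--
--   begin
--     DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
--       pSchemaName     => 'CT_MRDS',
--       pTableName      => 'MY_TABLE',
--       pKeyColumnName  => 'A_ETL_LOAD_SET_KEY_FK',
--       pBucketArea     => 'ARCHIVE',
--       pFolderName     => 'exports',
--       pMinDate        => DATE '2025-01-01',
--       pMaxDate        => SYSDATE,
--       pParallelDegree => 4,
--       pRegisterExport => TRUE,
--       pProcessName    => 'MARS_ARCHIVAL'    -- hypothetical process name
--     );
--   end;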
|
||||
---------------------------------------------------------------------------------------------------------------------------
|
||||
-- VERSION MANAGEMENT FUNCTIONS
|
||||
---------------------------------------------------------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Returns the current package version number
|
||||
* return: Version string in format X.Y.Z (e.g., '2.1.0')
|
||||
**/
|
||||
FUNCTION GET_VERSION RETURN VARCHAR2;
|
||||
|
||||
/**
|
||||
* Returns comprehensive build information including version, date, and author
|
||||
* return: Formatted string with complete build details
|
||||
**/
|
||||
FUNCTION GET_BUILD_INFO RETURN VARCHAR2;
|
||||
|
||||
/**
|
||||
* Returns the version history with recent changes
|
||||
* return: Multi-line string with version history
|
||||
**/
|
||||
FUNCTION GET_VERSION_HISTORY RETURN VARCHAR2;
|
||||
|
||||
END;
|
||||
|
||||
/
|
||||
File diff suppressed because it is too large
@@ -0,0 +1,625 @@
|
||||
create or replace PACKAGE CT_MRDS.ENV_MANAGER
|
||||
AUTHID CURRENT_USER
|
||||
AS
|
||||
/**
|
||||
* General comment for the package: please document functions and procedures as shown in the example below.
|
||||
* It is a standard.
|
||||
* The structure of this comment is used by the GET_PACKAGE_DOCUMENTATION function,
|
||||
* which returns documentation text ready to copy-paste into the Confluence page.
|
||||
**/
|
||||
|
||||
-- Example comment:
|
||||
/**
|
||||
* @name EX_PROCEDURE_NAME
|
||||
* @desc Procedure description
|
||||
* @example select ENV_MANAGER.EX_PROCEDURE_NAME(pParameter => 129) from dual;
|
||||
* @ex_rslt Example Result
|
||||
**/
|
||||
|
||||
-- Package Version Information (Semantic Versioning: MAJOR.MINOR.PATCH)
|
||||
PACKAGE_VERSION CONSTANT VARCHAR2(10) := '3.2.0';
|
||||
PACKAGE_BUILD_DATE CONSTANT VARCHAR2(20) := '2025-12-20 10:00:00';
|
||||
PACKAGE_AUTHOR CONSTANT VARCHAR2(100) := 'Grzegorz Michalski';
|
||||
|
||||
-- Version History (Latest changes first)
|
||||
VERSION_HISTORY CONSTANT VARCHAR2(4000) :=
|
||||
'3.2.0 (2025-12-20): Added error codes for parallel execution support (CODE_INVALID_PARALLEL_DEGREE -20110, CODE_PARALLEL_EXECUTION_FAILED -20111)' || CHR(13)||CHR(10) ||
|
||||
'3.1.0 (2025-10-22): Added package hash tracking and automatic change detection system (SHA256 hashing)' || CHR(13)||CHR(10) ||
|
||||
'3.0.0 (2025-10-22): Added package versioning system with centralized version management functions' || CHR(13)||CHR(10) ||
|
||||
'2.1.0 (2025-10-15): Added ANALYZE_VALIDATION_ERRORS function for comprehensive CSV validation analysis' || CHR(13)||CHR(10) ||
|
||||
'2.0.0 (2025-10-01): Added LOG_PROCESS_ERROR procedure with enhanced error diagnostics and stack traces' || CHR(13)||CHR(10) ||
|
||||
'1.5.0 (2025-09-20): Added console logging support with gvConsoleLoggingEnabled configuration' || CHR(13)||CHR(10) ||
|
||||
'1.0.0 (2025-09-01): Initial release with error management and configuration system';
|
||||
|
||||
TYPE Error_Record IS RECORD (
|
||||
code PLS_INTEGER,
|
||||
message VARCHAR2(4000)
|
||||
);
|
||||
|
||||
TYPE tErrorList IS TABLE OF Error_Record INDEX BY PLS_INTEGER;
|
||||
|
||||
Errors tErrorList;
|
||||
|
||||
|
||||
guid VARCHAR2(32);
|
||||
gvEnv VARCHAR2(200);
|
||||
gvUsername VARCHAR2(128);
|
||||
gvOsuser VARCHAR2(128);
|
||||
gvMachine VARCHAR2(64);
|
||||
gvModule VARCHAR2(64);
|
||||
|
||||
gvNameSpace VARCHAR2(200);
|
||||
gvRegion VARCHAR2(200);
|
||||
gvDataBucketName VARCHAR2(200);
|
||||
gvInboxBucketName VARCHAR2(200);
|
||||
gvArchiveBucketName VARCHAR2(200);
|
||||
gvDataBucketUri VARCHAR2(200);
|
||||
gvInboxBucketUri VARCHAR2(200);
|
||||
gvArchiveBucketUri VARCHAR2(200);
|
||||
gvCredentialName VARCHAR2(200);
|
||||
|
||||
-- Overwritten by variable "LoggingEnabled" in A_FILE_MANAGER_CONFIG.CONFIG_VARIABLE table
|
||||
gvLoggingEnabled VARCHAR2(3) := 'ON'; -- 'ON' or 'OFF'
|
||||
|
||||
-- Overwritten by variable "MinLogLevel" in A_FILE_MANAGER_CONFIG.CONFIG_VARIABLE table
|
||||
-- Possible values: DEBUG ,INFO ,WARNING ,ERROR
|
||||
gvMinLogLevel VARCHAR2(10) := 'DEBUG';
|
||||
|
||||
-- Overwritten by variable "DefaultDateFormat" in A_FILE_MANAGER_CONFIG.CONFIG_VARIABLE table
|
||||
gvDefaultDateFormat VARCHAR2(200) := 'DD/MM/YYYY HH24:MI:SS';
|
||||
|
||||
-- Overwritten by variable "ConsoleLoggingEnabled" in A_FILE_MANAGER_CONFIG.CONFIG_VARIABLE table
|
||||
gvConsoleLoggingEnabled VARCHAR2(3) := 'ON'; -- 'ON' or 'OFF'
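
-- Example (hypothetical values): the defaults above can be overridden per environment via
-- inserts into A_FILE_MANAGER_CONFIG, using the variable names mentioned in the comments:
--
--   insert into CT_MRDS.A_FILE_MANAGER_CONFIG (ENVIRONMENT_ID, CONFIG_VARIABLE, CONFIG_VARIABLE_VALUE)
--   values ('dev', 'MinLogLevel', 'INFO');
--   insert into CT_MRDS.A_FILE_MANAGER_CONFIG (ENVIRONMENT_ID, CONFIG_VARIABLE, CONFIG_VARIABLE_VALUE)
--   values ('dev', 'ConsoleLoggingEnabled', 'OFF');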
|
||||
|
||||
cgBL CONSTANT VARCHAR2(2) := CHR(13)||CHR(10);
|
||||
|
||||
vgSourceFileConfigKey PLS_INTEGER;
|
||||
|
||||
vgMsgTmp VARCHAR2(32000);
|
||||
--Exceptions
|
||||
ERR_EMPTY_FILEURI_AND_RECKEY EXCEPTION;
|
||||
CODE_EMPTY_FILEURI_AND_RECKEY CONSTANT PLS_INTEGER := -20001;
|
||||
MSG_EMPTY_FILEURI_AND_RECKEY VARCHAR2(4000) := 'Either pFileUri or pSourceFileReceivedKey must not be null';
|
||||
PRAGMA EXCEPTION_INIT( ERR_EMPTY_FILEURI_AND_RECKEY
|
||||
,CODE_EMPTY_FILEURI_AND_RECKEY);
|
||||
|
||||
|
||||
ERR_NO_CONFIG_MATCH_FOR_FILEURI EXCEPTION;
|
||||
CODE_NO_CONFIG_MATCH_FOR_FILEURI CONSTANT PLS_INTEGER := -20002;
|
||||
MSG_NO_CONFIG_MATCH_FOR_FILEURI VARCHAR2(4000) := 'No match for source file in A_SOURCE_FILE_CONFIG table'
|
||||
||cgBL||' The file provided in parameter: pFileUri does not have '
|
||||
||cgBL||' corresponding configuration in A_SOURCE_FILE_CONFIG table';
|
||||
PRAGMA EXCEPTION_INIT( ERR_NO_CONFIG_MATCH_FOR_FILEURI
|
||||
,CODE_NO_CONFIG_MATCH_FOR_FILEURI);
|
||||
|
||||
ERR_MULTIPLE_MATCH_FOR_SRCFILE EXCEPTION;
|
||||
CODE_MULTIPLE_MATCH_FOR_SRCFILE CONSTANT PLS_INTEGER := -20003;
|
||||
MSG_MULTIPLE_MATCH_FOR_SRCFILE VARCHAR2(4000) := 'Multiple match for source file in A_SOURCE_FILE_CONFIG table';
|
||||
PRAGMA EXCEPTION_INIT( ERR_MULTIPLE_MATCH_FOR_SRCFILE
|
||||
,CODE_MULTIPLE_MATCH_FOR_SRCFILE);
|
||||
|
||||
ERR_MISSING_COLUMN_DATE_FORMAT EXCEPTION;
|
||||
CODE_MISSING_COLUMN_DATE_FORMAT CONSTANT PLS_INTEGER := -20004;
|
||||
MSG_MISSING_COLUMN_DATE_FORMAT VARCHAR2(4000) := 'Missing entry in config table: A_COLUMN_DATE_FORMAT primary key(TEMPLATE_TABLE_NAME, COLUMN_NAME)'
|
||||
||cgBL||' Remember: each column whose data_type is IN (''DATE'', ''TIMESTAMP'')'
|
||||
||cgBL||' should have DateFormat specified in A_COLUMN_DATE_FORMAT table '
|
||||
||cgBL||' for example: ''YYYY-MM-DD''';
|
||||
PRAGMA EXCEPTION_INIT( ERR_MISSING_COLUMN_DATE_FORMAT
|
||||
,CODE_MISSING_COLUMN_DATE_FORMAT);
|
||||
|
||||
ERR_MULTIPLE_COLUMN_DATE_FORMAT EXCEPTION;
|
||||
CODE_MULTIPLE_COLUMN_DATE_FORMAT CONSTANT PLS_INTEGER := -20005;
|
||||
MSG_MULTIPLE_COLUMN_DATE_FORMAT VARCHAR2(4000) := 'Multiple records for date format in A_COLUMN_DATE_FORMAT table'
|
||||
||cgBL||' There should be only one format specified for each DATE/TIMESTAMP column';
|
||||
PRAGMA EXCEPTION_INIT( ERR_MULTIPLE_COLUMN_DATE_FORMAT
|
||||
,CODE_MULTIPLE_COLUMN_DATE_FORMAT);
|
||||
|
||||
|
||||
ERR_DIDNT_GET_LOAD_OPERATION_ID EXCEPTION;
|
||||
CODE_DIDNT_GET_LOAD_OPERATION_ID CONSTANT PLS_INTEGER := -20006;
|
||||
MSG_DIDNT_GET_LOAD_OPERATION_ID VARCHAR2(4000) := 'Did not get load operation id from external table validation';
|
||||
PRAGMA EXCEPTION_INIT( ERR_DIDNT_GET_LOAD_OPERATION_ID
|
||||
,CODE_DIDNT_GET_LOAD_OPERATION_ID);
|
||||
|
||||
ERR_NO_CONFIG_FOR_RECEIVED_FILE EXCEPTION;
|
||||
CODE_NO_CONFIG_FOR_RECEIVED_FILE CONSTANT PLS_INTEGER := -20007;
|
||||
MSG_NO_CONFIG_FOR_RECEIVED_FILE VARCHAR2(4000) := 'No match for received source file in A_SOURCE_FILE_CONFIG '
|
||||
||cgBL||' or missing data in A_SOURCE_FILE_RECEIVED table for provided pSourceFileReceivedKey parameter';
|
||||
PRAGMA EXCEPTION_INIT( ERR_NO_CONFIG_FOR_RECEIVED_FILE
|
||||
,CODE_NO_CONFIG_FOR_RECEIVED_FILE);
|
||||
|
||||
ERR_MULTI_CONFIG_FOR_RECEIVED_FILE EXCEPTION;
|
||||
CODE_MULTI_CONFIG_FOR_RECEIVED_FILE CONSTANT PLS_INTEGER := -20008;
|
||||
MSG_MULTI_CONFIG_FOR_RECEIVED_FILE VARCHAR2(4000) := 'Multiple matches for received source file in A_SOURCE_FILE_CONFIG';
|
||||
PRAGMA EXCEPTION_INIT( ERR_MULTI_CONFIG_FOR_RECEIVED_FILE
|
||||
,CODE_MULTI_CONFIG_FOR_RECEIVED_FILE);
|
||||
|
||||
ERR_FILE_NOT_FOUND_ON_CLOUD EXCEPTION;
|
||||
CODE_FILE_NOT_FOUND_ON_CLOUD CONSTANT PLS_INTEGER := -20009;
|
||||
MSG_FILE_NOT_FOUND_ON_CLOUD VARCHAR2(4000) := 'File not found on the cloud';
|
||||
PRAGMA EXCEPTION_INIT( ERR_FILE_NOT_FOUND_ON_CLOUD
|
||||
,CODE_FILE_NOT_FOUND_ON_CLOUD);
|
||||
|
||||
ERR_FILE_VALIDATION_FAILED EXCEPTION;
|
||||
CODE_FILE_VALIDATION_FAILED CONSTANT PLS_INTEGER := -20010;
|
||||
MSG_FILE_VALIDATION_FAILED VARCHAR2(4000) := 'File validation failed';
|
||||
PRAGMA EXCEPTION_INIT( ERR_FILE_VALIDATION_FAILED
|
||||
,CODE_FILE_VALIDATION_FAILED);
|
||||
|
||||
ERR_EXCESS_COLUMNS_DETECTED EXCEPTION;
|
||||
CODE_EXCESS_COLUMNS_DETECTED CONSTANT PLS_INTEGER := -20011;
|
||||
MSG_EXCESS_COLUMNS_DETECTED VARCHAR2(4000) := 'CSV file contains more columns than template allows';
|
||||
PRAGMA EXCEPTION_INIT( ERR_EXCESS_COLUMNS_DETECTED
|
||||
,CODE_EXCESS_COLUMNS_DETECTED);
|
||||
|
||||
ERR_NO_CONFIG_MATCH EXCEPTION;
|
||||
CODE_NO_CONFIG_MATCH CONSTANT PLS_INTEGER := -20012;
|
||||
MSG_NO_CONFIG_MATCH VARCHAR2(4000) := 'No match for specified parameters in A_SOURCE_FILE_CONFIG table';
|
||||
PRAGMA EXCEPTION_INIT( ERR_NO_CONFIG_MATCH
|
||||
,CODE_NO_CONFIG_MATCH);
|
||||
|
||||
ERR_UNKNOWN_PREFIX EXCEPTION;
|
||||
CODE_UNKNOWN_PREFIX CONSTANT PLS_INTEGER := -20013;
|
||||
MSG_UNKNOWN_PREFIX VARCHAR2(4000) := 'Unknown prefix';
|
||||
PRAGMA EXCEPTION_INIT( ERR_UNKNOWN_PREFIX
|
||||
,CODE_UNKNOWN_PREFIX);
|
||||
|
||||
ERR_TABLE_NOT_EXISTS EXCEPTION;
|
||||
CODE_TABLE_NOT_EXISTS CONSTANT PLS_INTEGER := -20014;
|
||||
MSG_TABLE_NOT_EXISTS VARCHAR2(4000) := 'Table does not exist';
|
||||
PRAGMA EXCEPTION_INIT( ERR_TABLE_NOT_EXISTS
|
||||
,CODE_TABLE_NOT_EXISTS);
|
||||
|
||||
ERR_COLUMN_NOT_EXISTS EXCEPTION;
|
||||
CODE_COLUMN_NOT_EXISTS CONSTANT PLS_INTEGER := -20015;
|
||||
MSG_COLUMN_NOT_EXISTS VARCHAR2(4000) := 'Column does not exist in table';
|
||||
PRAGMA EXCEPTION_INIT( ERR_COLUMN_NOT_EXISTS
|
||||
,CODE_COLUMN_NOT_EXISTS);
|
||||
|
||||
ERR_UNSUPPORTED_DATA_TYPE EXCEPTION;
|
||||
CODE_UNSUPPORTED_DATA_TYPE CONSTANT PLS_INTEGER := -20016;
|
||||
MSG_UNSUPPORTED_DATA_TYPE VARCHAR2(4000) := 'Unsupported data type';
|
||||
PRAGMA EXCEPTION_INIT( ERR_UNSUPPORTED_DATA_TYPE
|
||||
,CODE_UNSUPPORTED_DATA_TYPE);
|
||||
|
||||
ERR_MISSING_SOURCE_KEY EXCEPTION;
|
||||
CODE_MISSING_SOURCE_KEY CONSTANT PLS_INTEGER := -20017;
|
||||
MSG_MISSING_SOURCE_KEY VARCHAR2(4000) := 'The Source was not found in parent table A_SOURCE';
|
||||
PRAGMA EXCEPTION_INIT( ERR_MISSING_SOURCE_KEY
|
||||
,CODE_MISSING_SOURCE_KEY);
|
||||
|
||||
ERR_NULL_SOURCE_FILE_CONFIG_KEY EXCEPTION;
|
||||
CODE_NULL_SOURCE_FILE_CONFIG_KEY CONSTANT PLS_INTEGER := -20018;
|
||||
MSG_NULL_SOURCE_FILE_CONFIG_KEY VARCHAR2(4000) := 'No entry in A_SOURCE_FILE_CONFIG table for specified A_SOURCE_FILE_CONFIG_KEY';
|
||||
PRAGMA EXCEPTION_INIT( ERR_NULL_SOURCE_FILE_CONFIG_KEY
|
||||
,CODE_NULL_SOURCE_FILE_CONFIG_KEY);
|
||||
|
||||
ERR_DUPLICATED_SOURCE_KEY EXCEPTION;
|
||||
CODE_DUPLICATED_SOURCE_KEY CONSTANT PLS_INTEGER := -20019;
|
||||
MSG_DUPLICATED_SOURCE_KEY VARCHAR2(4000) := 'The Source already exists in the A_SOURCE table';
|
||||
PRAGMA EXCEPTION_INIT( ERR_DUPLICATED_SOURCE_KEY
|
||||
,CODE_DUPLICATED_SOURCE_KEY);
|
||||
|
||||
ERR_MISSING_CONTAINER_CONFIG EXCEPTION;
|
||||
CODE_MISSING_CONTAINER_CONFIG CONSTANT PLS_INTEGER := -20020;
|
||||
MSG_MISSING_CONTAINER_CONFIG VARCHAR2(4000) := 'No match in A_SOURCE_FILE_CONFIG table where SOURCE_FILE_TYPE=''CONTAINER'' and specified SOURCE_FILE_ID';
|
||||
PRAGMA EXCEPTION_INIT( ERR_MISSING_CONTAINER_CONFIG
|
||||
,CODE_MISSING_CONTAINER_CONFIG);
|
||||
|
||||
ERR_MULTIPLE_CONTAINER_ENTRIES EXCEPTION;
|
||||
CODE_MULTIPLE_CONTAINER_ENTRIES CONSTANT PLS_INTEGER := -20021;
|
||||
MSG_MULTIPLE_CONTAINER_ENTRIES VARCHAR2(4000) := 'Multiple matches in A_SOURCE_FILE_CONFIG table where SOURCE_FILE_TYPE=''CONTAINER'' and specified SOURCE_FILE_ID';
|
||||
PRAGMA EXCEPTION_INIT( ERR_MULTIPLE_CONTAINER_ENTRIES
|
||||
,CODE_MULTIPLE_CONTAINER_ENTRIES);
|
||||
|
||||
ERR_WRONG_DESTINATION_PARAM EXCEPTION;
|
||||
CODE_WRONG_DESTINATION_PARAM CONSTANT PLS_INTEGER := -20022;
|
||||
MSG_WRONG_DESTINATION_PARAM VARCHAR2(4000) := 'Wrong destination parameter provided.';
|
||||
PRAGMA EXCEPTION_INIT( ERR_WRONG_DESTINATION_PARAM
|
||||
,CODE_WRONG_DESTINATION_PARAM);
|
||||
|
||||
ERR_FILE_NOT_EXISTS_ON_CLOUD EXCEPTION;
|
||||
CODE_FILE_NOT_EXISTS_ON_CLOUD CONSTANT PLS_INTEGER := -20023;
|
||||
MSG_FILE_NOT_EXISTS_ON_CLOUD VARCHAR2(4000) := 'File does not exist on cloud.';
|
||||
PRAGMA EXCEPTION_INIT( ERR_FILE_NOT_EXISTS_ON_CLOUD
|
||||
,CODE_FILE_NOT_EXISTS_ON_CLOUD);
|
||||
|
||||
ERR_FILE_ALREADY_REGISTERED EXCEPTION;
|
||||
CODE_FILE_ALREADY_REGISTERED CONSTANT PLS_INTEGER := -20024;
|
||||
MSG_FILE_ALREADY_REGISTERED VARCHAR2(4000) := 'File already registered in A_SOURCE_FILE_RECEIVED table.';
|
||||
PRAGMA EXCEPTION_INIT( ERR_FILE_ALREADY_REGISTERED
|
||||
,CODE_FILE_ALREADY_REGISTERED);
|
||||
|
||||
ERR_WRONG_DATE_TIMESTAMP_FORMAT EXCEPTION;
|
||||
CODE_WRONG_DATE_TIMESTAMP_FORMAT CONSTANT PLS_INTEGER := -20025;
|
||||
MSG_WRONG_DATE_TIMESTAMP_FORMAT VARCHAR2(4000) := 'Provided DATE or TIMESTAMP format has errors (possible duplicated codes, ex: ''DD'').';
|
||||
PRAGMA EXCEPTION_INIT( ERR_WRONG_DATE_TIMESTAMP_FORMAT
|
||||
,CODE_WRONG_DATE_TIMESTAMP_FORMAT);
|
||||
|
||||
ERR_ENVIRONMENT_NOT_SET EXCEPTION;
|
||||
CODE_ENVIRONMENT_NOT_SET CONSTANT PLS_INTEGER := -20026;
|
||||
MSG_ENVIRONMENT_NOT_SET VARCHAR2(4000) := 'EnvironmentID not set'
|
||||
||cgBL||' Information about environment is needed to get proper configuration values.'
|
||||
||cgBL||' It can be set up in two different ways:'
|
||||
||cgBL||' 1. Set it on session level: execute DBMS_SESSION.SET_IDENTIFIER (client_id => ''dev'')'
|
||||
||cgBL||' 2. Set it on configuration level: Insert into CT_MRDS.A_FILE_MANAGER_CONFIG (ENVIRONMENT_ID,CONFIG_VARIABLE,CONFIG_VARIABLE_VALUE) values (''default'',''environment_id'',''dev'')'
|
||||
||cgBL||' Session level setup (1.) takes precedence over configuration level one (2.)'
|
||||
;
|
||||
PRAGMA EXCEPTION_INIT( ERR_ENVIRONMENT_NOT_SET
|
||||
,CODE_ENVIRONMENT_NOT_SET);
|
||||
|
||||
|
||||
ERR_CONFIG_VARIABLE_NOT_SET EXCEPTION;
|
||||
CODE_CONFIG_VARIABLE_NOT_SET CONSTANT PLS_INTEGER := -20027;
|
||||
MSG_CONFIG_VARIABLE_NOT_SET VARCHAR2(4000) := 'Missing configuration value in A_FILE_MANAGER_CONFIG';
|
||||
PRAGMA EXCEPTION_INIT( ERR_CONFIG_VARIABLE_NOT_SET
|
||||
,CODE_CONFIG_VARIABLE_NOT_SET);
|
||||
|
||||
ERR_NOT_INPUT_SOURCE_FILE_TYPE EXCEPTION;
|
||||
CODE_NOT_INPUT_SOURCE_FILE_TYPE CONSTANT PLS_INTEGER := -20028;
|
||||
MSG_NOT_INPUT_SOURCE_FILE_TYPE VARCHAR2(4000) := 'Archival can be executed only for A_SOURCE_FILE_CONFIG_KEY where SOURCE_FILE_TYPE=''INPUT''';
|
||||
PRAGMA EXCEPTION_INIT( ERR_NOT_INPUT_SOURCE_FILE_TYPE
|
||||
,CODE_NOT_INPUT_SOURCE_FILE_TYPE);
|
||||
|
||||
ERR_EXP_DATA_FOR_ARCH_FAILED EXCEPTION;
|
||||
CODE_EXP_DATA_FOR_ARCH_FAILED CONSTANT PLS_INTEGER := -20029;
|
||||
MSG_EXP_DATA_FOR_ARCH_FAILED VARCHAR2(4000) := 'Export data for archival failed.';
|
||||
PRAGMA EXCEPTION_INIT( ERR_EXP_DATA_FOR_ARCH_FAILED
|
||||
,CODE_EXP_DATA_FOR_ARCH_FAILED);
|
||||
|
||||
ERR_RESTORE_FILE_FROM_TRASH EXCEPTION;
|
||||
CODE_RESTORE_FILE_FROM_TRASH CONSTANT PLS_INTEGER := -20030;
|
||||
MSG_RESTORE_FILE_FROM_TRASH VARCHAR2(4000) := 'Unexpected issues occurred during the archival process. Restoration of exported files failed.';
|
||||
PRAGMA EXCEPTION_INIT( ERR_RESTORE_FILE_FROM_TRASH
|
||||
,CODE_RESTORE_FILE_FROM_TRASH);
|
||||
|
||||
ERR_CHANGE_STAT_TO_ARCHIVED_FAILED EXCEPTION;
|
||||
CODE_CHANGE_STAT_TO_ARCHIVED_FAILED CONSTANT PLS_INTEGER := -20031;
|
||||
MSG_CHANGE_STAT_TO_ARCHIVED_FAILED VARCHAR2(4000) := 'Failed to change file status to: ARCHIVED in A_SOURCE_FILE_RECEIVED table.';
|
||||
PRAGMA EXCEPTION_INIT( ERR_CHANGE_STAT_TO_ARCHIVED_FAILED
|
||||
,CODE_CHANGE_STAT_TO_ARCHIVED_FAILED);
|
||||
|
||||
ERR_MOVE_FILE_TO_TRASH_FAILED EXCEPTION;
|
||||
CODE_MOVE_FILE_TO_TRASH_FAILED CONSTANT PLS_INTEGER := -20032;
|
||||
MSG_MOVE_FILE_TO_TRASH_FAILED VARCHAR2(4000) := 'FAILED to move file to TRASH before DROPPING it.';
|
||||
PRAGMA EXCEPTION_INIT( ERR_MOVE_FILE_TO_TRASH_FAILED
|
||||
,CODE_MOVE_FILE_TO_TRASH_FAILED);
|
||||
|
||||
ERR_DROP_EXPORTED_FILES_FAILED EXCEPTION;
|
||||
CODE_DROP_EXPORTED_FILES_FAILED CONSTANT PLS_INTEGER := -20033;
|
||||
MSG_DROP_EXPORTED_FILES_FAILED VARCHAR2(4000) := 'FAILED to DROP exported files.';
|
||||
PRAGMA EXCEPTION_INIT( ERR_DROP_EXPORTED_FILES_FAILED
|
||||
,CODE_DROP_EXPORTED_FILES_FAILED);
|
||||
|
||||
ERR_INVALID_BUCKET_AREA EXCEPTION;
|
||||
CODE_INVALID_BUCKET_AREA CONSTANT PLS_INTEGER := -20034;
|
||||
MSG_INVALID_BUCKET_AREA VARCHAR2(4000) := 'Invalid bucket area specified. Valid values: INBOX, ODS, DATA, ARCHIVE';
|
||||
PRAGMA EXCEPTION_INIT( ERR_INVALID_BUCKET_AREA
|
||||
,CODE_INVALID_BUCKET_AREA);
|
||||
|
||||
ERR_INVALID_PARALLEL_DEGREE EXCEPTION;
|
||||
CODE_INVALID_PARALLEL_DEGREE CONSTANT PLS_INTEGER := -20110;
|
||||
MSG_INVALID_PARALLEL_DEGREE VARCHAR2(4000) := 'Invalid parallel degree parameter. Must be between 1 and 16';
|
||||
PRAGMA EXCEPTION_INIT( ERR_INVALID_PARALLEL_DEGREE
|
||||
,CODE_INVALID_PARALLEL_DEGREE);
|
||||
|
||||
ERR_PARALLEL_EXECUTION_FAILED EXCEPTION;
|
||||
CODE_PARALLEL_EXECUTION_FAILED CONSTANT PLS_INTEGER := -20111;
|
||||
MSG_PARALLEL_EXECUTION_FAILED VARCHAR2(4000) := 'Parallel execution failed';
|
||||
PRAGMA EXCEPTION_INIT( ERR_PARALLEL_EXECUTION_FAILED
|
||||
,CODE_PARALLEL_EXECUTION_FAILED);
|
||||
|
||||
ERR_UNKNOWN EXCEPTION;
|
||||
CODE_UNKNOWN CONSTANT PLS_INTEGER := -20999;
|
||||
MSG_UNKNOWN VARCHAR2(4000) := 'Unknown Error Occurred';
|
||||
PRAGMA EXCEPTION_INIT( ERR_UNKNOWN
|
||||
,CODE_UNKNOWN);
|
||||
|
||||
---------------------------------------------------------------------------------------------------------------------------
|
||||
---------------------------------------------------------------------------------------------------------------------------
|
||||
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* @name LOG_PROCESS_EVENT
|
||||
* @desc Insert a new log record into A_PROCESS_LOG table.
|
||||
* Also outputs to console if gvConsoleLoggingEnabled = 'ON'.
|
||||
* Respects logging level configuration (gvMinLogLevel).
|
||||
* @example ENV_MANAGER.LOG_PROCESS_EVENT('Process completed successfully', 'INFO', 'pParam1=value1');
|
||||
* @ex_rslt Record inserted into A_PROCESS_LOG table and optionally displayed in console output
|
||||
**/
|
||||
PROCEDURE LOG_PROCESS_EVENT (
|
||||
pLogMessage VARCHAR2
|
||||
,pLogLevel VARCHAR2 DEFAULT 'ERROR'
|
||||
,pParameters VARCHAR2 DEFAULT NULL
|
||||
,pProcessName VARCHAR2 DEFAULT 'FILE_MANAGER'
|
||||
);
|
||||
|
||||
/**
|
||||
* @name LOG_PROCESS_ERROR
|
||||
* @desc Insert a detailed error record into A_PROCESS_LOG table with full stack trace, backtrace, and call stack.
|
||||
* This procedure captures comprehensive error information for debugging purposes while
|
||||
* allowing clean user-facing error messages to be raised separately.
|
||||
* @param pLogMessage - Base error message description
|
||||
* @param pParameters - Procedure parameters for context
|
||||
* @param pProcessName - Name of the calling process/package
|
||||
* @ex_rslt Record inserted into A_PROCESS_LOG table with complete error stack information
|
||||
*/
|
||||
PROCEDURE LOG_PROCESS_ERROR (
|
||||
pLogMessage VARCHAR2
|
||||
,pParameters VARCHAR2 DEFAULT NULL
|
||||
,pProcessName VARCHAR2 DEFAULT 'FILE_MANAGER'
|
||||
);
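
-- A sketch of the intended calling pattern (hypothetical values): log the full
-- diagnostics with LOG_PROCESS_ERROR, then re-raise the clean user-facing error.
--
--   EXCEPTION
--     WHEN OTHERS THEN
--       ENV_MANAGER.LOG_PROCESS_ERROR(
--         pLogMessage  => 'Export failed',
--         pParameters  => 'pTableName=LEGACY_DEBT',
--         pProcessName => 'DATA_EXPORTER');
--       RAISE;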
|
||||
|
||||
/**
|
||||
* @name INIT_ERRORS
|
||||
* @desc Loads data into Errors array.
|
||||
* Errors array is a list of Record(Error_Code, Error_Message) indexed by Error_Code.
|
||||
* Called automatically during package initialization.
|
||||
* @example Called automatically when package is first referenced
|
||||
* @ex_rslt Errors array populated with all error codes and messages
|
||||
**/
|
||||
PROCEDURE INIT_ERRORS;
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* @name GET_DEFAULT_ENV
|
||||
* @desc Returns the name of the default environment.
* The returned string is the A_FILE_MANAGER_CONFIG.ENVIRONMENT_ID value.
|
||||
* @example select ENV_MANAGER.GET_DEFAULT_ENV() from dual;
|
||||
* @ex_rslt dev
|
||||
**/
|
||||
FUNCTION GET_DEFAULT_ENV
|
||||
RETURN VARCHAR2;
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* @name INIT_VARIABLES
|
||||
* @desc For the specified pEnv parameter (A_FILE_MANAGER_CONFIG.ENVIRONMENT_ID),
* assigns values to the following global package variables:
|
||||
* - gvNameSpace
|
||||
* - gvRegion
|
||||
* - gvCredentialName
|
||||
* - gvInboxBucketName
|
||||
* - gvDataBucketName
|
||||
* - gvArchiveBucketName
|
||||
* - gvInboxBucketUri
|
||||
* - gvDataBucketUri
|
||||
* - gvArchiveBucketUri
|
||||
* - gvLoggingEnabled
|
||||
* - gvMinLogLevel
|
||||
* - gvDefaultDateFormat
|
||||
* - gvConsoleLoggingEnabled
|
||||
**/
|
||||
PROCEDURE INIT_VARIABLES(
|
||||
pEnv VARCHAR2
|
||||
);
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* @name GET_ERROR_MESSAGE
|
||||
* @desc Returns the error message for the specified pCode (Error_Code).
* The error message is taken from the Errors array loaded by the INIT_ERRORS procedure.
|
||||
* @example select ENV_MANAGER.GET_ERROR_MESSAGE(pCode => -20009) from dual;
|
||||
* @ex_rslt File not found on the cloud
|
||||
**/
|
||||
FUNCTION GET_ERROR_MESSAGE(
|
||||
pCode PLS_INTEGER
|
||||
) RETURN VARCHAR2;
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* @name GET_ERROR_STACK
|
||||
* @desc Returns a string with the full error stack information.
* The error message is taken from the Errors array loaded by the INIT_ERRORS procedure.
|
||||
* @example
|
||||
* select ENV_MANAGER.GET_ERROR_STACK(
|
||||
* pFormat => 'OUTPUT'
|
||||
* ,pCode => -20009
|
||||
* ,pSourceFileReceivedKey => NULL)
|
||||
* from dual
|
||||
* @ex_rslt
|
||||
* ------------------------------------------------------+
|
||||
* Error Message:
|
||||
* ORA-0000: normal, successful completion
|
||||
* -------------------------------------------------------
|
||||
* Error Stack:
|
||||
* -------------------------------------------------------
|
||||
* Error Backtrace:
|
||||
* ------------------------------------------------------+
|
||||
**/
|
||||
FUNCTION GET_ERROR_STACK(
|
||||
pFormat VARCHAR2
|
||||
,pCode PLS_INTEGER
|
||||
,pSourceFileReceivedKey CT_MRDS.A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY%TYPE DEFAULT NULL
|
||||
) RETURN VARCHAR2;
|
||||
|
||||
/**
|
||||
* @name FORMAT_PARAMETERS
|
||||
* @desc Formats parameter list for logging purposes.
|
||||
* Converts SYS.ODCIVARCHAR2LIST to formatted string with proper NULL handling.
|
||||
* @example select ENV_MANAGER.FORMAT_PARAMETERS(SYS.ODCIVARCHAR2LIST('param1=value1', 'param2=NULL')) from dual;
|
||||
* @ex_rslt param1=value1 ,
|
||||
* param2=NULL
|
||||
**/
|
||||
FUNCTION FORMAT_PARAMETERS(
|
||||
pParameterList SYS.ODCIVARCHAR2LIST
|
||||
) RETURN VARCHAR2;
|
||||
|
||||
/**
|
||||
* @name ANALYZE_VALIDATION_ERRORS
|
||||
* @desc Analyzes CSV validation errors and generates detailed diagnostic report.
|
||||
* Compares CSV structure with template table and provides specific error analysis.
|
||||
* Includes suggested solutions for common validation issues.
|
||||
* @param pValidationLogTable - Name of validation log table (e.g., VALIDATE$242_LOG)
|
||||
* @param pTemplateSchema - Schema of template table (e.g., CT_ET_TEMPLATES)
|
||||
* @param pTemplateTable - Name of template table (e.g., MOCK_PROC_TABLE)
|
||||
* @param pCsvFileUri - URI of CSV file being validated
|
||||
* @example SELECT ENV_MANAGER.ANALYZE_VALIDATION_ERRORS('VALIDATE$242_LOG', 'CT_ET_TEMPLATES', 'MOCK_PROC_TABLE', 'https://...') FROM DUAL;
|
||||
* @ex_rslt Detailed validation analysis report with column mismatches and solutions
|
||||
**/
|
||||
FUNCTION ANALYZE_VALIDATION_ERRORS(
|
||||
pValidationLogTable VARCHAR2,
|
||||
pTemplateSchema VARCHAR2,
|
||||
pTemplateTable VARCHAR2,
|
||||
pCsvFileUri VARCHAR2
|
||||
) RETURN VARCHAR2;
|
||||
|
||||
---------------------------------------------------------------------------------------------------------------------------
|
||||
-- PACKAGE VERSION MANAGEMENT FUNCTIONS
|
||||
---------------------------------------------------------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* @name GET_VERSION
|
||||
* @desc Returns the current version number of the ENV_MANAGER package.
|
||||
* Uses semantic versioning format (MAJOR.MINOR.PATCH).
|
||||
* @example SELECT ENV_MANAGER.GET_VERSION() FROM DUAL;
|
||||
* @ex_rslt 3.0.0
|
||||
**/
|
||||
FUNCTION GET_VERSION RETURN VARCHAR2;
|
||||
|
||||
/**
|
||||
* @name GET_BUILD_INFO
|
||||
* @desc Returns comprehensive build information including version, build date, and author.
|
||||
* Formatted for display in logs or monitoring systems.
|
||||
* @example SELECT ENV_MANAGER.GET_BUILD_INFO() FROM DUAL;
|
||||
* @ex_rslt Package: ENV_MANAGER
|
||||
* Version: 3.0.0
|
||||
* Build Date: 2025-10-22 16:00:00
|
||||
* Author: Grzegorz Michalski
|
||||
**/
|
||||
FUNCTION GET_BUILD_INFO RETURN VARCHAR2;
|
||||
|
||||
/**
|
||||
* @name GET_VERSION_HISTORY
|
||||
* @desc Returns complete version history with all releases and changes.
|
||||
* Shows evolution of package features over time.
|
||||
* @example SELECT ENV_MANAGER.GET_VERSION_HISTORY() FROM DUAL;
|
||||
* @ex_rslt ENV_MANAGER Version History:
|
||||
* 3.0.0 (2025-10-22): Added package versioning system...
|
||||
* 2.1.0 (2025-10-15): Added ANALYZE_VALIDATION_ERRORS function...
|
||||
**/
|
||||
FUNCTION GET_VERSION_HISTORY RETURN VARCHAR2;
|
||||
|
||||
/**
|
||||
* @name GET_PACKAGE_VERSION_INFO
|
||||
* @desc Universal function to get formatted version information for any package.
|
||||
* This centralized function is used by all packages in the system.
|
||||
* @param pPackageName - Name of the package
|
||||
* @param pVersion - Version string (MAJOR.MINOR.PATCH format)
|
||||
* @param pBuildDate - Build date timestamp
|
||||
* @param pAuthor - Package author name
|
||||
* @example SELECT ENV_MANAGER.GET_PACKAGE_VERSION_INFO('FILE_MANAGER', '2.1.0', '2025-10-22 15:00:00', 'Grzegorz Michalski') FROM DUAL;
|
||||
* @ex_rslt Package: FILE_MANAGER
|
||||
* Version: 2.1.0
|
||||
* Build Date: 2025-10-22 15:00:00
|
||||
* Author: Grzegorz Michalski
|
||||
**/
|
||||
FUNCTION GET_PACKAGE_VERSION_INFO(
|
||||
pPackageName VARCHAR2,
|
||||
pVersion VARCHAR2,
|
||||
pBuildDate VARCHAR2,
|
||||
pAuthor VARCHAR2
|
||||
) RETURN VARCHAR2;
|
||||
|
||||
/**
|
||||
* @name FORMAT_VERSION_HISTORY
|
||||
* @desc Universal function to format version history for any package.
|
||||
* Adds package name header and proper formatting.
|
||||
* @param pPackageName - Name of the package
|
||||
* @param pVersionHistory - Complete version history text
|
||||
* @example SELECT ENV_MANAGER.FORMAT_VERSION_HISTORY('FILE_MANAGER', '2.1.0 (2025-10-22): Export procedures...') FROM DUAL;
|
||||
* @ex_rslt FILE_MANAGER Version History:
|
||||
* 2.1.0 (2025-10-22): Export procedures...
|
||||
**/
|
||||
FUNCTION FORMAT_VERSION_HISTORY(
|
||||
pPackageName VARCHAR2,
|
||||
pVersionHistory VARCHAR2
|
||||
) RETURN VARCHAR2;
|
||||
|
||||
---------------------------------------------------------------------------------------------------------------------------
|
||||
-- PACKAGE HASH + CHANGE DETECTION FUNCTIONS
|
||||
---------------------------------------------------------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* @name CALCULATE_PACKAGE_HASH
|
||||
* @desc Calculates SHA256 hash of package source code from ALL_SOURCE.
|
||||
* Returns hash for both SPEC and BODY (if exists).
|
||||
* Used for automatic change detection.
|
||||
* @param pPackageOwner - Schema owner of the package
|
||||
* @param pPackageName - Name of the package
|
||||
* @param pPackageType - Type of package code ('PACKAGE' for SPEC, 'PACKAGE BODY' for BODY)
|
||||
* @example SELECT ENV_MANAGER.CALCULATE_PACKAGE_HASH('CT_MRDS', 'FILE_MANAGER', 'PACKAGE') FROM DUAL;
|
||||
* @ex_rslt A7B3C5D9E8F1234567890ABCDEF... (64-character SHA256 hash)
|
||||
**/
|
||||
FUNCTION CALCULATE_PACKAGE_HASH(
|
||||
pPackageOwner VARCHAR2,
|
||||
pPackageName VARCHAR2,
|
||||
pPackageType VARCHAR2 -- 'PACKAGE' or 'PACKAGE BODY'
|
||||
) RETURN VARCHAR2;
|
||||
|
||||
/**
|
||||
* @name TRACK_PACKAGE_VERSION
|
||||
* @desc Records package version and source code hash in A_PACKAGE_VERSION_TRACKING table.
|
||||
* Automatically detects if source code changed without version update.
|
||||
* Should be called after every package deployment.
|
||||
* @param pPackageOwner - Schema owner of the package
|
||||
* @param pPackageName - Name of the package
|
||||
* @param pPackageVersion - Current version from PACKAGE_VERSION constant
|
||||
* @param pPackageBuildDate - Build date from PACKAGE_BUILD_DATE constant
|
||||
* @param pPackageAuthor - Author from PACKAGE_AUTHOR constant
|
||||
* @example EXEC ENV_MANAGER.TRACK_PACKAGE_VERSION('CT_MRDS', 'FILE_MANAGER', '3.2.0', '2025-10-22 16:30:00', 'Grzegorz Michalski');
|
||||
* @ex_rslt Record inserted into A_PACKAGE_VERSION_TRACKING with change detection status
|
||||
**/
|
||||
PROCEDURE TRACK_PACKAGE_VERSION(
|
||||
pPackageOwner VARCHAR2,
|
||||
pPackageName VARCHAR2,
|
||||
pPackageVersion VARCHAR2,
|
||||
pPackageBuildDate VARCHAR2,
|
||||
pPackageAuthor VARCHAR2
|
||||
);
|
||||
|
||||
/**
|
||||
* @name CHECK_PACKAGE_CHANGES
|
||||
* @desc Checks if package source code has changed since last tracking.
|
||||
* Compares current hash with last recorded hash in A_PACKAGE_VERSION_TRACKING.
|
||||
* Returns detailed change detection report.
|
||||
* @param pPackageOwner - Schema owner of the package
|
||||
* @param pPackageName - Name of the package
|
||||
* @example SELECT ENV_MANAGER.CHECK_PACKAGE_CHANGES('CT_MRDS', 'FILE_MANAGER') FROM DUAL;
|
||||
* @ex_rslt WARNING: Package changed without version update!
|
||||
* Last Version: 3.2.0
|
||||
* Current Hash (SPEC): A7B3C5D9...
|
||||
* Last Hash (SPEC): B8C4D6E0...
|
||||
* RECOMMENDATION: Update PACKAGE_VERSION and PACKAGE_BUILD_DATE
|
||||
**/
|
||||
FUNCTION CHECK_PACKAGE_CHANGES(
|
||||
pPackageOwner VARCHAR2,
|
||||
pPackageName VARCHAR2
|
||||
) RETURN VARCHAR2;
|
||||
|
||||
/**
|
||||
* @name GET_PACKAGE_HASH_INFO
|
||||
* @desc Returns formatted information about package hash and tracking history.
|
||||
* Includes current hash, last tracked hash, and change detection status.
|
||||
* @param pPackageOwner - Schema owner of the package
|
||||
* @param pPackageName - Name of the package
|
||||
* @example SELECT ENV_MANAGER.GET_PACKAGE_HASH_INFO('CT_MRDS', 'FILE_MANAGER') FROM DUAL;
|
||||
* @ex_rslt Package: CT_MRDS.FILE_MANAGER
|
||||
* Current Version: 3.2.0
|
||||
* Current Hash (SPEC): A7B3C5D9...
|
||||
* Last Tracked: 2025-10-22 16:30:00
|
||||
* Status: OK - No changes detected
|
||||
**/
|
||||
FUNCTION GET_PACKAGE_HASH_INFO(
|
||||
pPackageOwner VARCHAR2,
|
||||
pPackageName VARCHAR2
|
||||
) RETURN VARCHAR2;
|
||||
|
||||
END ENV_MANAGER;
|
||||
/
|
||||
@@ -103,11 +103,13 @@ BEGIN
|
||||
pBucketArea => 'DATA',
|
||||
pFolderName => 'ODS/CSDB/CSDB_DEBT',
|
||||
pMinDate => &cutoff_date,
|
||||
pMaxDate => SYSDATE,
|
||||
pMaxDate => DATE '9999-12-31', -- Include future dates (MAX_LOAD_START can be beyond SYSDATE)
|
||||
pParallelDegree => 16,
|
||||
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_DEBT',
|
||||
pMaxFileSize => 104857600, -- 100MB in bytes (safe for parallel execution, avoids ORA-04036)
|
||||
pRegisterExport => TRUE -- Register exported files in A_SOURCE_FILE_RECEIVED with metadata (CHECKSUM, CREATED, BYTES)
|
||||
pRegisterExport => TRUE, -- Register exported files in A_SOURCE_FILE_RECEIVED with metadata (CHECKSUM, CREATED, BYTES)
|
||||
pProcessName => 'MARS-835', -- Process identifier for tracking
|
||||
pJobClass => 'high' -- Oracle Scheduler job class for resource management
|
||||
);
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_DEBT exported to DATA bucket with template column order');
|
||||
@@ -128,7 +130,8 @@ BEGIN
|
||||
pFolderName => 'ARCHIVE/CSDB/CSDB_DEBT',
|
||||
pMaxDate => &cutoff_date,
|
||||
pParallelDegree => 16,
|
||||
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_DEBT'
|
||||
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_DEBT',
|
||||
pJobClass => 'high' -- Oracle Scheduler job class for resource management
|
||||
);
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_DEBT exported to HIST bucket with template column order');
|
||||
@@ -223,11 +226,13 @@ BEGIN
|
||||
pBucketArea => 'DATA',
|
||||
pFolderName => 'ODS/CSDB/CSDB_DEBT_DAILY',
|
||||
pMinDate => &cutoff_date,
|
||||
pMaxDate => SYSDATE,
|
||||
pMaxDate => DATE '9999-12-31', -- Include future dates (MAX_LOAD_START can be beyond SYSDATE)
|
||||
pParallelDegree => 16,
|
||||
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_DEBT_DAILY',
|
||||
pMaxFileSize => 104857600, -- 100MB in bytes (safe for parallel execution, avoids ORA-04036)
|
||||
pRegisterExport => TRUE -- Register exported files in A_SOURCE_FILE_RECEIVED with metadata (CHECKSUM, CREATED, BYTES)
|
||||
pRegisterExport => TRUE, -- Register exported files in A_SOURCE_FILE_RECEIVED with metadata (CHECKSUM, CREATED, BYTES)
|
||||
pProcessName => 'MARS-835', -- Process identifier for tracking
|
||||
pJobClass => 'high' -- Oracle Scheduler job class for resource management
|
||||
);
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_DEBT_DAILY exported to DATA bucket with template column order');
|
||||
@@ -248,7 +253,8 @@ BEGIN
|
||||
pFolderName => 'ARCHIVE/CSDB/CSDB_DEBT_DAILY',
|
||||
pMaxDate => &cutoff_date,
|
||||
pParallelDegree => 16,
|
||||
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_DEBT_DAILY'
|
||||
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_DEBT_DAILY',
|
||||
pJobClass => 'high' -- Oracle Scheduler job class for resource management
|
||||
);
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_DEBT_DAILY exported to HIST bucket with template column order');
|
||||
|
||||
@@ -33,9 +33,11 @@ BEGIN
|
||||
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
|
||||
pBucketArea => 'ARCHIVE',
|
||||
pFolderName => 'ARCHIVE/CSDB/CSDB_INSTR_RAT_FULL',
|
||||
pMaxDate => SYSDATE,
|
||||
pMinDate => DATE '1900-01-01', -- Explicit start date for clarity
|
||||
pMaxDate => DATE '9999-12-31', -- Include future dates (MAX_LOAD_START can be beyond SYSDATE)
|
||||
pParallelDegree => 8,
|
||||
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_INSTR_RAT_FULL'
|
||||
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_INSTR_RAT_FULL',
|
||||
pJobClass => 'high' -- Oracle Scheduler job class for resource management
|
||||
);
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_INSTR_RAT_FULL exported to HIST bucket with template column order');
|
||||
@@ -60,9 +62,11 @@ BEGIN
|
||||
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
|
||||
pBucketArea => 'ARCHIVE',
|
||||
pFolderName => 'ARCHIVE/CSDB/CSDB_INSTR_DESC_FULL',
|
||||
pMaxDate => SYSDATE,
|
||||
pMinDate => DATE '1900-01-01', -- Explicit start date for clarity
|
||||
pMaxDate => DATE '9999-12-31', -- Include future dates (MAX_LOAD_START can be beyond SYSDATE)
|
||||
pParallelDegree => 8,
|
||||
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_INSTR_DESC_FULL'
|
||||
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_INSTR_DESC_FULL',
|
||||
pJobClass => 'high' -- Oracle Scheduler job class for resource management
|
||||
);
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_INSTR_DESC_FULL exported to HIST bucket with template column order');
|
||||
@@ -87,9 +91,11 @@ BEGIN
|
||||
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
|
||||
pBucketArea => 'ARCHIVE',
|
||||
pFolderName => 'ARCHIVE/CSDB/CSDB_ISSUER_RAT_FULL',
|
||||
pMaxDate => SYSDATE,
|
||||
pMinDate => DATE '1900-01-01', -- Explicit start date for clarity
|
||||
pMaxDate => DATE '9999-12-31', -- Include future dates (MAX_LOAD_START can be beyond SYSDATE)
|
||||
pParallelDegree => 8,
|
||||
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_ISSUER_RAT_FULL'
|
||||
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_ISSUER_RAT_FULL',
|
||||
pJobClass => 'high' -- Oracle Scheduler job class for resource management
|
||||
);
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_ISSUER_RAT_FULL exported to HIST bucket with template column order');
|
||||
@@ -114,9 +120,11 @@ BEGIN
|
||||
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
|
||||
pBucketArea => 'ARCHIVE',
|
||||
pFolderName => 'ARCHIVE/CSDB/CSDB_ISSUER_DESC_FULL',
|
||||
pMaxDate => SYSDATE,
|
||||
pMinDate => DATE '1900-01-01', -- Explicit start date for clarity
|
||||
pMaxDate => DATE '9999-12-31', -- Include future dates (MAX_LOAD_START can be beyond SYSDATE)
|
||||
pParallelDegree => 8,
|
||||
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_ISSUER_DESC_FULL'
|
||||
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_ISSUER_DESC_FULL',
|
||||
pJobClass => 'high' -- Oracle Scheduler job class for resource management
|
||||
);
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_ISSUER_DESC_FULL exported to HIST bucket with template column order');
|
||||
|
||||
@@ -0,0 +1,54 @@
|
||||
--=============================================================================================================================
|
||||
-- MARS-835 ROLLBACK: Delete File Registration Records
|
||||
--=============================================================================================================================
|
||||
-- Purpose: Delete all file registration records from A_SOURCE_FILE_RECEIVED table for MARS-835 process
|
||||
-- Author: Grzegorz Michalski
|
||||
-- Date: 2026-02-13
|
||||
-- Related: MARS-835 - CSDB Data Export Rollback
|
||||
--=============================================================================================================================
|
||||
|
||||
SET SERVEROUTPUT ON SIZE UNLIMITED
|
||||
SET TIMING ON
|
||||
|
||||
PROMPT ========================================================================
|
||||
PROMPT ROLLBACK: Deleting file registration records from A_SOURCE_FILE_RECEIVED
|
||||
PROMPT ========================================================================
|
||||
|
||||
DECLARE
|
||||
vRowCount NUMBER := 0;
|
||||
vStartTime TIMESTAMP := SYSTIMESTAMP;
|
||||
vEndTime TIMESTAMP;
|
||||
vElapsedSeconds NUMBER;
|
||||
BEGIN
|
||||
DBMS_OUTPUT.PUT_LINE('Deleting all MARS-835 file registrations from A_SOURCE_FILE_RECEIVED...');
|
||||
|
||||
-- Delete all records for MARS-835 process
|
||||
DELETE FROM CT_MRDS.A_SOURCE_FILE_RECEIVED
|
||||
WHERE PROCESS_NAME = 'MARS-835';
|
||||
|
||||
vRowCount := SQL%ROWCOUNT;
|
||||
COMMIT;
|
||||
|
||||
vEndTime := SYSTIMESTAMP;
|
||||
vElapsedSeconds := EXTRACT(SECOND FROM (vEndTime - vStartTime)) +
|
||||
EXTRACT(MINUTE FROM (vEndTime - vStartTime)) * 60 +
|
||||
EXTRACT(HOUR FROM (vEndTime - vStartTime)) * 3600;
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('========================================================================');
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: File registration records deleted');
|
||||
DBMS_OUTPUT.PUT_LINE('========================================================================');
|
||||
DBMS_OUTPUT.PUT_LINE('Records deleted: ' || vRowCount);
|
||||
DBMS_OUTPUT.PUT_LINE('Elapsed time: ' || ROUND(vElapsedSeconds, 2) || ' seconds');
|
||||
DBMS_OUTPUT.PUT_LINE('========================================================================');
|
||||
EXCEPTION
|
||||
WHEN OTHERS THEN
|
||||
ROLLBACK;
|
||||
DBMS_OUTPUT.PUT_LINE('ERROR: Failed to delete file registration records');
|
||||
DBMS_OUTPUT.PUT_LINE('Error message: ' || SQLERRM);
|
||||
RAISE;
|
||||
END;
|
||||
/
|
||||
|
||||
--=============================================================================================================================
|
||||
-- End of Script
|
||||
--=============================================================================================================================
|
||||
@@ -22,25 +22,24 @@ DECLARE
|
||||
vDataBucketUri VARCHAR2(500);
|
||||
vHistBucketUri VARCHAR2(500);
|
||||
vCredentialName VARCHAR2(100);
|
||||
vFileCount NUMBER := 0;
|
||||
BEGIN
|
||||
-- Get bucket URIs and credential
|
||||
vDataBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('DATA');
|
||||
vHistBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
|
||||
vCredentialName := CT_MRDS.ENV_MANAGER.gvCredentialName;
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('Deleting DEBT files from DATA bucket...');
|
||||
DBMS_OUTPUT.PUT_LINE('Deleting DEBT CSV files from DATA bucket...');
|
||||
DBMS_OUTPUT.PUT_LINE(' Using DBMS_CLOUD.LIST_OBJECTS to scan bucket');
|
||||
|
||||
-- Delete CSV files from DATA bucket (only files matching export pattern)
|
||||
-- Pattern matches: LEGACY_DEBT_YYYYMM.csv OR LEGACY_DEBT_YYYYMM_1_20260122T...Z.csv (Oracle timestamp)
|
||||
-- Delete CSV files for DEBT from DATA bucket using LIST_OBJECTS
|
||||
FOR rec IN (
|
||||
SELECT object_name
|
||||
SELECT object_name
|
||||
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
||||
credential_name => vCredentialName,
|
||||
location_uri => vDataBucketUri || 'ODS/CSDB/CSDB_DEBT/'
|
||||
))
|
||||
WHERE object_name LIKE 'LEGACY_DEBT_%'
|
||||
AND object_name LIKE '%.csv'
|
||||
AND REGEXP_LIKE(object_name, '^LEGACY_DEBT_[0-9]{6}(_[0-9]+_[0-9]{8}T[0-9]{6,}Z)?\.csv$') -- YYYYMM or YYYYMM_1_timestamp
|
||||
WHERE object_name LIKE 'LEGACY_DEBT%'
|
||||
) LOOP
|
||||
BEGIN
|
||||
DBMS_CLOUD.DELETE_OBJECT(
|
||||
@@ -48,6 +47,7 @@ BEGIN
|
||||
object_uri => vDataBucketUri || 'ODS/CSDB/CSDB_DEBT/' || rec.object_name
|
||||
);
|
||||
DBMS_OUTPUT.PUT_LINE(' Deleted: ' || rec.object_name);
|
||||
vFileCount := vFileCount + 1;
|
||||
EXCEPTION
|
||||
WHEN OTHERS THEN
|
||||
IF SQLCODE = -20404 THEN
|
||||
@@ -58,19 +58,20 @@ BEGIN
|
||||
END;
|
||||
END LOOP;
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('Deleting DEBT files from HIST bucket...');
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: DEBT CSV files deleted from DATA bucket (' || vFileCount || ' file(s))');
|
||||
|
||||
-- Delete Parquet files from HIST bucket (only files matching export pattern)
|
||||
-- Pattern matches: YYYYMM.parquet OR YYYYMM_1_20260122T...Z.parquet (Oracle timestamp)
|
||||
DBMS_OUTPUT.PUT_LINE('Deleting DEBT Parquet files from ARCHIVE bucket...');
|
||||
DBMS_OUTPUT.PUT_LINE(' Using DBMS_CLOUD.LIST_OBJECTS (Parquet files not registered)');
|
||||
vFileCount := 0;
|
||||
|
||||
-- Delete Parquet files from ARCHIVE bucket using DBMS_CLOUD.LIST_OBJECTS
|
||||
FOR rec IN (
|
||||
SELECT object_name
|
||||
SELECT object_name
|
||||
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
||||
credential_name => vCredentialName,
|
||||
location_uri => vHistBucketUri || 'ARCHIVE/CSDB/CSDB_DEBT/'
|
||||
))
|
||||
WHERE object_name LIKE '%PARTITION_YEAR=%' -- Hive-style partitioning folders
|
||||
AND object_name LIKE '%.parquet'
|
||||
AND REGEXP_LIKE(object_name, '[0-9]{6}(_[0-9]+_[0-9]{8}T[0-9]{6,}Z)?\.parquet$') -- YYYYMM or YYYYMM_1_timestamp
|
||||
WHERE object_name NOT LIKE '%/' -- Exclude directories
|
||||
) LOOP
|
||||
BEGIN
|
||||
DBMS_CLOUD.DELETE_OBJECT(
|
||||
@@ -78,6 +79,7 @@ BEGIN
|
||||
object_uri => vHistBucketUri || 'ARCHIVE/CSDB/CSDB_DEBT/' || rec.object_name
|
||||
);
|
||||
DBMS_OUTPUT.PUT_LINE(' Deleted: ' || rec.object_name);
|
||||
vFileCount := vFileCount + 1;
|
||||
EXCEPTION
|
||||
WHEN OTHERS THEN
|
||||
IF SQLCODE = -20404 THEN
|
||||
@@ -88,7 +90,11 @@ BEGIN
|
||||
END;
|
||||
END LOOP;
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: DEBT files deleted');
|
||||
IF vFileCount = 0 THEN
|
||||
DBMS_OUTPUT.PUT_LINE(' INFO: No DEBT Parquet files found to delete');
|
||||
END IF;
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: DEBT Parquet files deleted from ARCHIVE bucket (' || vFileCount || ' file(s))');
|
||||
END;
|
||||
/
|
||||
|
||||
@@ -104,25 +110,24 @@ DECLARE
|
||||
vDataBucketUri VARCHAR2(500);
|
||||
vHistBucketUri VARCHAR2(500);
|
||||
vCredentialName VARCHAR2(100);
|
||||
vFileCount NUMBER := 0;
|
||||
BEGIN
|
||||
-- Get bucket URIs and credential
|
||||
vDataBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('DATA');
|
||||
vHistBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
|
||||
vCredentialName := CT_MRDS.ENV_MANAGER.gvCredentialName;
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('Deleting DEBT_DAILY files from DATA bucket...');
|
||||
DBMS_OUTPUT.PUT_LINE('Deleting DEBT_DAILY CSV files from DATA bucket...');
|
||||
DBMS_OUTPUT.PUT_LINE(' Using DBMS_CLOUD.LIST_OBJECTS to scan bucket');
|
||||
|
||||
-- Delete CSV files from DATA bucket (only files matching export pattern)
|
||||
-- Pattern matches: LEGACY_DEBT_DAILY_YYYYMM.csv OR LEGACY_DEBT_DAILY_YYYYMM_1_timestamp.csv
|
||||
-- Delete CSV files for DEBT_DAILY from DATA bucket using LIST_OBJECTS
|
||||
FOR rec IN (
|
||||
SELECT object_name
|
||||
SELECT object_name
|
||||
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
||||
credential_name => vCredentialName,
|
||||
location_uri => vDataBucketUri || 'ODS/CSDB/CSDB_DEBT_DAILY/'
|
||||
))
|
||||
WHERE object_name LIKE 'LEGACY_DEBT_DAILY_%'
|
||||
AND object_name LIKE '%.csv'
|
||||
AND REGEXP_LIKE(object_name, '^LEGACY_DEBT_DAILY_[0-9]{6}(_[0-9]+_[0-9]{8}T[0-9]{6,}Z)?\.csv$') -- YYYYMM or YYYYMM_1_timestamp
|
||||
WHERE object_name LIKE 'LEGACY_DEBT_DAILY%'
|
||||
) LOOP
|
||||
BEGIN
|
||||
DBMS_CLOUD.DELETE_OBJECT(
|
||||
@@ -130,6 +135,7 @@ BEGIN
|
||||
object_uri => vDataBucketUri || 'ODS/CSDB/CSDB_DEBT_DAILY/' || rec.object_name
|
||||
);
|
||||
DBMS_OUTPUT.PUT_LINE(' Deleted: ' || rec.object_name);
|
||||
vFileCount := vFileCount + 1;
|
||||
EXCEPTION
|
||||
WHEN OTHERS THEN
|
||||
IF SQLCODE = -20404 THEN
|
||||
@@ -140,19 +146,20 @@ BEGIN
|
||||
END;
|
||||
END LOOP;
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('Deleting DEBT_DAILY files from HIST bucket...');
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: DEBT_DAILY CSV files deleted from DATA bucket (' || vFileCount || ' file(s))');
|
||||
|
||||
-- Delete Parquet files from HIST bucket (only files matching export pattern)
|
||||
-- Pattern matches: YYYYMM.parquet OR YYYYMM_1_timestamp.parquet
|
||||
DBMS_OUTPUT.PUT_LINE('Deleting DEBT_DAILY Parquet files from ARCHIVE bucket...');
|
||||
DBMS_OUTPUT.PUT_LINE(' Using DBMS_CLOUD.LIST_OBJECTS (Parquet files not registered)');
|
||||
vFileCount := 0;
|
||||
|
||||
-- Delete Parquet files from ARCHIVE bucket using DBMS_CLOUD.LIST_OBJECTS
|
||||
FOR rec IN (
|
||||
SELECT object_name
|
||||
SELECT object_name
|
||||
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
||||
credential_name => vCredentialName,
|
||||
location_uri => vHistBucketUri || 'ARCHIVE/CSDB/CSDB_DEBT_DAILY/'
|
||||
))
|
||||
WHERE object_name LIKE '%PARTITION_YEAR=%' -- Hive-style partitioning folders
|
||||
AND object_name LIKE '%.parquet'
|
||||
AND REGEXP_LIKE(object_name, '[0-9]{6}(_[0-9]+_[0-9]{8}T[0-9]{6,}Z)?\.parquet$') -- YYYYMM or YYYYMM_1_timestamp
|
||||
WHERE object_name NOT LIKE '%/' -- Exclude directories
|
||||
) LOOP
|
||||
BEGIN
|
||||
DBMS_CLOUD.DELETE_OBJECT(
|
||||
@@ -160,6 +167,7 @@ BEGIN
|
||||
object_uri => vHistBucketUri || 'ARCHIVE/CSDB/CSDB_DEBT_DAILY/' || rec.object_name
|
||||
);
|
||||
DBMS_OUTPUT.PUT_LINE(' Deleted: ' || rec.object_name);
|
||||
vFileCount := vFileCount + 1;
|
||||
EXCEPTION
|
||||
WHEN OTHERS THEN
|
||||
IF SQLCODE = -20404 THEN
|
||||
@@ -170,7 +178,11 @@ BEGIN
|
||||
END;
|
||||
END LOOP;
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: DEBT_DAILY files deleted');
|
||||
IF vFileCount = 0 THEN
|
||||
DBMS_OUTPUT.PUT_LINE(' INFO: No DEBT_DAILY Parquet files found to delete');
|
||||
END IF;
|
||||
|
||||
DBMS_OUTPUT.PUT_LINE('SUCCESS: DEBT_DAILY Parquet files deleted from ARCHIVE bucket (' || vFileCount || ' file(s))');
|
||||
END;
|
||||
/
|
||||
|
||||
|
||||
@@ -1,165 +0,0 @@
|
||||
# MARS-835: One-Time CSDB Data Export from Operational Database to External Tables
|
||||
|
||||
## Overview
|
||||
This package performs a one-time bulk export of CSDB data from operational database tables (OU_CSDB schema) to new external tables in OCI buckets. The export uses DATA_EXPORTER v2.4.0 with per-column date format handling to move historical data to either DATA bucket (CSV format) or HIST bucket (Parquet format with Hive-style partitioning).
|
||||
|
||||
**Migration Strategy:**
|
||||
- **Split Export (2 tables)**: DEBT, DEBT_DAILY - Last 6 months → DATA (CSV), Older data → HIST (Parquet)
|
||||
- **HIST Only (4 tables)**: INSTR_RAT_FULL, INSTR_DESC_FULL, ISSUER_RAT_FULL, ISSUER_DESC_FULL - All data → HIST (Parquet)
|
||||
|
||||
**Key Transformations:**
|
||||
- Column rename: `A_ETL_LOAD_SET_FK` → `A_WORKFLOW_HISTORY_KEY` (all tables)
|
||||
- Column removal: DEBT (2 columns), DEBT_DAILY (6 columns) not required in new structure
|
||||
|
||||
## Contents
|
||||
- `install_mars835.sql` - Master installation script with SPOOL logging
|
||||
- `rollback_mars835.sql` - Master rollback script
|
||||
- `01_MARS_835_*.sql` - Individual installation scripts
|
||||
- `91_MARS_835_*.sql` - Individual rollback scripts
|
||||
- `track_package_versions.sql` - Package version tracking
|
||||
- `verify_packages_version.sql` - Package verification
|
||||
|
||||
## Prerequisites
|
||||
- Oracle Database 23ai
|
||||
- ADMIN user access (required for all MARS installations)
|
||||
- ENV_MANAGER v3.1.0+
|
||||
- Required schema privileges
|
||||
|
||||
## Installation
|
||||
|
||||
### Option 1: Master Script (Recommended)
|
||||
```powershell
|
||||
# IMPORTANT: Execute as ADMIN user for proper privilege management
|
||||
Get-Content "MARS_Packages/REL01_POST_DEACTIVATION/MARS-835/install_mars835.sql" | sql "ADMIN/Cloudpass#34@ggmichalski_high"
|
||||
|
||||
# Log file created: log/INSTALL_MARS_835_<PDB>_<timestamp>.log
|
||||
```
|
||||
|
||||
### Option 2: Individual Scripts
|
||||
```powershell
|
||||
# IMPORTANT: Execute as ADMIN user
|
||||
Get-Content "01_MARS_835_*.sql" | sql "ADMIN/Cloudpass#34@ggmichalski_high"
|
||||
Get-Content "02_MARS_835_*.sql" | sql "ADMIN/Cloudpass#34@ggmichalski_high"
|
||||
# ... etc
|
||||
```
|
||||
|
||||
## Verification
|
||||
```sql
|
||||
-- Verify package versions
|
||||
SELECT PACKAGE_NAME.GET_VERSION() FROM DUAL;
|
||||
|
||||
-- Check for errors (ADMIN user checks specific schema)
|
||||
SELECT * FROM ALL_ERRORS
|
||||
WHERE OWNER = 'CT_MRDS' -- Replace with target schema
|
||||
AND NAME = 'PACKAGE_NAME';
|
||||
|
||||
-- Verify no untracked changes
|
||||
SELECT ENV_MANAGER.CHECK_PACKAGE_CHANGES('CT_MRDS', 'PACKAGE_NAME') FROM DUAL;
|
||||
```
|
||||
|
||||
## Rollback
|
||||
```powershell
|
||||
# IMPORTANT: Execute as ADMIN user
|
||||
Get-Content "MARS_Packages/REL01_POST_DEACTIVATION/MARS-835/rollback_mars835.sql" | sql "ADMIN/Cloudpass#34@ggmichalski_high"
|
||||
|
||||
**NOTE**: Rollback for data exports is **NOT RECOMMENDED** as it would delete exported files from OCI buckets. Only use rollback if export failed and needs to be restarted.
|
||||
```
|
||||
|
||||
## Expected Changes
|
||||
|
||||
### Data Export Summary
|
||||
**6 CSDB tables exported from OU_CSDB schema:**
|
||||
|
||||
**Group 1: Split DATA + HIST (Time Critical)**
|
||||
1. **DEBT** - Last 6 months → DATA, Older → HIST
|
||||
2. **DEBT_DAILY** - Last 6 months → DATA, Older → HIST
|
||||
|
||||
**Group 2: HIST Only (Weekend Bulk)**
|
||||
3. **INSTR_RAT_FULL** - All data → HIST
|
||||
4. **INSTR_DESC_FULL** - All data → HIST
|
||||
5. **ISSUER_RAT_FULL** - All data → HIST
|
||||
6. **ISSUER_DESC_FULL** - All data → HIST
|
||||
|
||||
### Bucket Destinations (DEV environment)
|
||||
- **DATA Bucket**: `mrds_data_dev/ODS/CSDB/` (CSV format)
|
||||
- **HIST Bucket**: `mrds_hist_dev/ARCHIVE/CSDB/` (Parquet with partitioning)
|
||||
|
||||
### Column Mappings
|
||||
- **All tables**: `A_ETL_LOAD_SET_FK` renamed to `A_WORKFLOW_HISTORY_KEY`
|
||||
- **DEBT**: Removed columns: `IDIRDEPOSITORY`, `VA_BONDDURATION`
|
||||
- **DEBT_DAILY**: Removed columns: `STEPID`, `PROGRAMNAME`, `PROGRAMCEILING`, `PROGRAMSTATUS`, `ISSUERNACE21SECTOR`, `INSTRUMENTQUOTATIONBASIS`
|
||||
|
||||
## Testing
|
||||
|
||||
### Post-Export Verification
|
||||
|
||||
1. **Verify CSV files in DATA bucket** (DEBT, DEBT_DAILY - last 6 months):
|
||||
```sql
|
||||
-- Check exported files
|
||||
SELECT object_name, bytes
|
||||
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
||||
credential_name => 'DEF_CRED_ARN',
|
||||
location_uri => 'https://objectstorage.region.oraclecloud.com/n/namespace/b/mrds_data_dev/o/ODS/CSDB/'
|
||||
)) WHERE object_name LIKE '%CSDB_DEBT%';
|
||||
```
|
||||
|
||||
2. **Verify Parquet files in HIST bucket** (all 6 tables):
|
||||
```sql
|
||||
-- Check archived files with Hive partitioning
|
||||
SELECT object_name, bytes
|
||||
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
||||
credential_name => 'DEF_CRED_ARN',
|
||||
location_uri => 'https://objectstorage.region.oraclecloud.com/n/namespace/b/mrds_hist_dev/o/ARCHIVE/CSDB/'
|
||||
)) WHERE object_name LIKE '%PARTITION_YEAR=%';
|
||||
```
|
||||
|
||||
3. **Validate row counts match source tables**:
|
||||
```sql
|
||||
-- Compare counts between source and exported data
|
||||
SELECT COUNT(*) FROM OU_CSDB.DEBT;
|
||||
SELECT COUNT(*) FROM ODS.CSDB_DEBT_ODS; -- External table pointing to DATA
|
||||
SELECT COUNT(*) FROM ODS.CSDB_DEBT_ARCHIVE; -- External table pointing to HIST
|
||||
```
|
||||
|
||||
4. **Verify column mappings**:
|
||||
```sql
|
||||
-- Check A_WORKFLOW_HISTORY_KEY exists in exported data
|
||||
SELECT A_WORKFLOW_HISTORY_KEY, COUNT(*)
|
||||
FROM ODS.CSDB_DEBT_ARCHIVE
|
||||
GROUP BY A_WORKFLOW_HISTORY_KEY;
|
||||
```
|
||||
|
||||
## Known Issues
|
||||
|
||||
### Timing Constraints
|
||||
- **DATA exports (DEBT, DEBT_DAILY)**: Must execute during parallel old+new loads phase after Production deployment
|
||||
- **HIST exports (all 6 tables)**: Can run anytime, recommended for weekend bulk execution to avoid interference
|
||||
|
||||
### Environment-Specific Configuration
|
||||
- Bucket names must be adjusted for each environment:
|
||||
- DEV: `mrds_data_dev`, `mrds_hist_dev`
|
||||
- TEST: `mrds_data_test`, `mrds_hist_test`
|
||||
- PROD: `mrds_data_prod`, `mrds_hist_prod`
|
||||
|
||||
### Data Cutoff Date
|
||||
- Export scripts use 6-month cutoff date calculated as `ADD_MONTHS(SYSDATE, -6)`
|
||||
- Verify cutoff aligns with business requirements before execution
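
A quick way to preview the cutoff before running the exports (a sketch; the actual scripts may truncate or format the date differently):

```sql
-- Preview the 6-month cutoff used by the export scripts
SELECT ADD_MONTHS(SYSDATE, -6) AS cutoff_date FROM DUAL;
```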
|
||||
|
||||
### One-Time Execution
|
||||
- This is a **ONE-TIME data migration** package
|
||||
- After successful execution, the package should be **deactivated** (moved to REL01_POST_DEACTIVATION)
|
||||
- Do not re-run unless explicitly required for data refresh
|
||||
|
||||
## Related
|
||||
- **JIRA**: MARS-835 - CSDB Data Export to External Tables
|
||||
- **Confluence**: FILE_MANAGER package - MRDS - Technical Team
|
||||
- **Confluence**: Table Setup Guide for FILE PROCESSOR System
|
||||
- **Source Schema**: OU_CSDB (Operational Database)
|
||||
- **Target Schema**: ODS (External Tables)
|
||||
- **Migration Type**: One-time bulk export (deactivated post-execution)
|
||||
|
||||
---
|
||||
|
||||
**Author:** Grzegorz Michalski
|
||||
**Date:** 2025-12-04
|
||||
**Version:** 1.0.0
|
||||
@@ -1,207 +0,0 @@
|
||||
# MARS-835: Required External Tables for Smart Column Mapping
|
||||
|
||||
## Overview
|
||||
This document lists all external tables required for MARS-835 data exports using DATA_EXPORTER v2.4.0 with Smart Column Mapping feature.
|
||||
|
||||
**Purpose**: Smart Column Mapping ensures CSV files are generated with columns in the EXACT order expected by external tables, preventing NULL values due to Oracle's positional CSV mapping.
|
||||
|
||||
---
|
||||
|
||||
## Required External Tables
|
||||
|
||||
### Group 1: DATA Bucket (CSV Format) - **CRITICAL**
|
||||
|
||||
#### 1. ODS.CSDB_DEBT_DATA_ODS
|
||||
- **Source Table**: OU_CSDB.LEGACY_DEBT
|
||||
- **Format**: CSV
|
||||
- **Bucket**: DATA (mrds_data_dev/ODS/CSDB/CSDB_DEBT/)
|
||||
- **Key Column Mapping**: A_ETL_LOAD_SET_FK → A_WORKFLOW_HISTORY_KEY (position 2 recommended)
|
||||
- **Critical**: Must use Smart Column Mapping to avoid NULL values in A_WORKFLOW_HISTORY_KEY
|
||||
|
||||
#### 2. ODS.CSDB_DEBT_DAILY_DATA_ODS
|
||||
- **Source Table**: OU_CSDB.LEGACY_DEBT_DAILY
|
||||
- **Format**: CSV
|
||||
- **Bucket**: DATA (mrds_data_dev/ODS/CSDB/CSDB_DEBT_DAILY/)
|
||||
- **Key Column Mapping**: A_ETL_LOAD_SET_FK → A_WORKFLOW_HISTORY_KEY (position 2 recommended)
|
||||
- **Critical**: Must use Smart Column Mapping to avoid NULL values in A_WORKFLOW_HISTORY_KEY
|
||||
|
||||
---
|
||||
|
||||
### Group 2: ARCHIVE Bucket (Parquet Format) - **RECOMMENDED**
|
||||
|
||||
#### 3. ODS.CSDB_DEBT_ARCHIVE
|
||||
- **Source Table**: OU_CSDB.LEGACY_DEBT
|
||||
- **Format**: Parquet with Hive partitioning
|
||||
- **Bucket**: ARCHIVE (mrds_hist_dev/ARCHIVE/CSDB/CSDB_DEBT/)
|
||||
- **Key Column Mapping**: A_ETL_LOAD_SET_FK → A_WORKFLOW_HISTORY_KEY
|
||||
- **Note**: Parquet uses schema-based mapping (column order less critical but Smart Column Mapping ensures consistency)
|
||||
|
||||
#### 4. ODS.CSDB_DEBT_DAILY_ARCHIVE
|
||||
- **Source Table**: OU_CSDB.LEGACY_DEBT_DAILY
|
||||
- **Format**: Parquet with Hive partitioning
|
||||
- **Bucket**: ARCHIVE (mrds_hist_dev/ARCHIVE/CSDB/CSDB_DEBT_DAILY/)
|
||||
- **Key Column Mapping**: A_ETL_LOAD_SET_FK → A_WORKFLOW_HISTORY_KEY
|
||||
|
||||
#### 5. ODS.CSDB_INSTR_RAT_FULL_ARCHIVE
|
||||
- **Source Table**: OU_CSDB.LEGACY_INSTR_RAT_FULL
|
||||
- **Format**: Parquet with Hive partitioning
|
||||
- **Bucket**: ARCHIVE (mrds_hist_dev/ARCHIVE/CSDB/CSDB_INSTR_RAT_FULL/)
|
||||
- **Key Column Mapping**: A_ETL_LOAD_SET_FK → A_WORKFLOW_HISTORY_KEY
|
||||
|
||||
#### 6. ODS.CSDB_INSTR_DESC_FULL_ARCHIVE
|
||||
- **Source Table**: OU_CSDB.LEGACY_INSTR_DESC_FULL
|
||||
- **Format**: Parquet with Hive partitioning
|
||||
- **Bucket**: ARCHIVE (mrds_hist_dev/ARCHIVE/CSDB/CSDB_INSTR_DESC_FULL/)
|
||||
- **Key Column Mapping**: A_ETL_LOAD_SET_FK → A_WORKFLOW_HISTORY_KEY
|
||||
|
||||
#### 7. ODS.CSDB_ISSUER_RAT_FULL_ARCHIVE
|
||||
- **Source Table**: OU_CSDB.LEGACY_ISSUER_RAT_FULL
|
||||
- **Format**: Parquet with Hive partitioning
|
||||
- **Bucket**: ARCHIVE (mrds_hist_dev/ARCHIVE/CSDB/CSDB_ISSUER_RAT_FULL/)
|
||||
- **Key Column Mapping**: A_ETL_LOAD_SET_FK → A_WORKFLOW_HISTORY_KEY
|
||||
|
||||
#### 8. ODS.CSDB_ISSUER_DESC_FULL_ARCHIVE
|
||||
- **Source Table**: OU_CSDB.LEGACY_ISSUER_DESC_FULL
|
||||
- **Format**: Parquet with Hive partitioning
|
||||
- **Bucket**: ARCHIVE (mrds_hist_dev/ARCHIVE/CSDB/CSDB_ISSUER_DESC_FULL/)
|
||||
- **Key Column Mapping**: A_ETL_LOAD_SET_FK → A_WORKFLOW_HISTORY_KEY
|
||||
|
||||
---
|
||||
|
||||
## External Table Column Order Requirements
|
||||
|
||||
### **CRITICAL for CSV Tables** (DATA bucket):
|
||||
|
||||
All CSV external tables MUST have **A_WORKFLOW_HISTORY_KEY at position 2**:
|
||||
|
||||
```
|
||||
Position 1: A_KEY (NUMBER)
|
||||
Position 2: A_WORKFLOW_HISTORY_KEY (NUMBER) ← MUST BE HERE!
|
||||
Position 3+: Other columns in any order
|
||||
```
|
||||
|
||||
**Reason**: Oracle external tables with CSV format use **positional mapping** (the header row is ignored). If the source table has A_ETL_LOAD_SET_FK at position 72, the CSV also places it at position 72, while the external table expects A_WORKFLOW_HISTORY_KEY at position 2. The external table then reads whatever is at position 2 (possibly a DATE column) as a NUMBER, the conversion fails, and the column ends up NULL.
|
||||
|
||||
**Solution**: Smart Column Mapping (v2.4.0) generates CSV columns in EXTERNAL TABLE order, ensuring position 2 has the correct NUMBER value.
|
||||
|
||||
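As a quick sanity check, the data dictionary can be queried for any ODS external table whose A_WORKFLOW_HISTORY_KEY did not end up at COLUMN_ID 2. This is a minimal sketch, not part of the delivered scripts; the `%_DATA_ODS` name filter is an assumption based on the CSV table names listed above.

```sql
-- Sketch: flag CSV external tables where A_WORKFLOW_HISTORY_KEY is not at position 2
SELECT t.owner,
       t.table_name,
       c.column_id AS workflow_key_position
FROM   all_external_tables t
JOIN   all_tab_columns c
       ON c.owner = t.owner
      AND c.table_name = t.table_name
WHERE  t.owner = 'ODS'
AND    t.table_name LIKE '%_DATA_ODS'
AND    c.column_name = 'A_WORKFLOW_HISTORY_KEY'
AND    c.column_id <> 2
ORDER  BY t.table_name;
```

No rows are expected; any row returned points at a table that should be recreated with Smart Column Mapping.
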
### **OPTIONAL for Parquet Tables** (ARCHIVE bucket):

Parquet format uses **schema-based mapping** (column names). Column order doesn't matter, but Smart Column Mapping provides consistency.

---

## Creation Script Example

### CSV External Table (CRITICAL - Correct Column Order)

```sql
-- Example: ODS.CSDB_DEBT_DATA_ODS
-- IMPORTANT: A_WORKFLOW_HISTORY_KEY must be at position 2!

BEGIN
    ODS.FILE_MANAGER_ODS.CREATE_EXTERNAL_TABLE(
        pTableName         => 'CSDB_DEBT_DATA_ODS',
        pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_DEBT_TEMPLATE',
        pPrefix            => 'ODS/CSDB/CSDB_DEBT',
        pBucketUri         => CT_MRDS.ENV_MANAGER.gvDataBucketUri,
        pFormat            => 'CSV' -- Uses positional mapping!
    );
END;
/

-- Verify column order (A_WORKFLOW_HISTORY_KEY should be position 2)
SELECT column_id, column_name, data_type
FROM all_tab_columns
WHERE table_name = 'CSDB_DEBT_DATA_ODS'
  AND owner = 'ODS'
ORDER BY column_id;
```

### Parquet External Table (Optional Column Order)

```sql
-- Example: ODS.CSDB_DEBT_ARCHIVE
-- Column order flexible (schema-based mapping)

BEGIN
    ODS.FILE_MANAGER_ODS.CREATE_EXTERNAL_TABLE(
        pTableName         => 'CSDB_DEBT_ARCHIVE',
        pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_DEBT_TEMPLATE',
        pPrefix            => 'ARCHIVE/CSDB/CSDB_DEBT',
        pBucketUri         => CT_MRDS.ENV_MANAGER.gvArchiveBucketUri,
        pFormat            => 'PARQUET' -- Uses schema-based mapping
    );
END;
/
```

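After creation, a quick smoke test (a sketch, not part of the delivered scripts) is to count rows through the new external table; any object-storage or format problem typically surfaces immediately as an ORA error here.

```sql
-- Sketch: smoke-test the newly created external table
SELECT COUNT(*) AS row_count
FROM   ODS.CSDB_DEBT_ARCHIVE;
```
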
---

## Template Tables Required

All external tables require corresponding template tables in CT_ET_TEMPLATES schema:

- `CT_ET_TEMPLATES.CSDB_DEBT_TEMPLATE`
- `CT_ET_TEMPLATES.CSDB_DEBT_DAILY_TEMPLATE`
- `CT_ET_TEMPLATES.CSDB_INSTR_RAT_FULL_TEMPLATE`
- `CT_ET_TEMPLATES.CSDB_INSTR_DESC_FULL_TEMPLATE`
- `CT_ET_TEMPLATES.CSDB_ISSUER_RAT_FULL_TEMPLATE`
- `CT_ET_TEMPLATES.CSDB_ISSUER_DESC_FULL_TEMPLATE`

**Note**: Template tables must be created by ADMIN or CT_ET_TEMPLATES user (MRDS_LOADER cannot create them).

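For orientation only, a minimal sketch of what such a template table could look like. The business columns after A_WORKFLOW_HISTORY_KEY are placeholder assumptions, not the actual CSDB_DEBT layout; the point is that A_KEY and A_WORKFLOW_HISTORY_KEY occupy positions 1 and 2, so that external tables generated from the template start with the required order (assuming CREATE_EXTERNAL_TABLE copies the template's column order).

```sql
-- Hedged sketch of a template table; the business columns are placeholders,
-- not the real CSDB_DEBT column list.
CREATE TABLE CT_ET_TEMPLATES.CSDB_DEBT_TEMPLATE (
    A_KEY                  NUMBER,        -- position 1
    A_WORKFLOW_HISTORY_KEY NUMBER,        -- position 2 (required for CSV positional mapping)
    A_ISIN                 VARCHAR2(12),  -- placeholder business column (position 3+)
    A_REFERENCE_DATE       DATE           -- placeholder business column
);
```
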
---

## Verification Checklist

Before running MARS-835 exports:

- [ ] All 8 external tables exist in ODS schema
- [ ] CSV tables (DATA bucket) have A_WORKFLOW_HISTORY_KEY at position 2
- [ ] Template tables exist in CT_ET_TEMPLATES schema
- [ ] MRDS_LOADER has EXECUTE privilege on ODS.FILE_MANAGER_ODS
- [ ] ODS schema has access to CT_MRDS.ENV_MANAGER for logging
- [ ] DATA_EXPORTER v2.4.0 deployed with Smart Column Mapping feature

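Several of these items can be checked from the data dictionary. A minimal sketch follows, assuming standard Oracle dictionary views and that the EXECUTE grant is made directly to MRDS_LOADER (adjust if it is granted through a role):

```sql
-- 1. The 8 external tables exist in ODS
SELECT COUNT(*) AS external_table_count
FROM   all_external_tables
WHERE  owner = 'ODS'
AND    table_name IN ('CSDB_DEBT_DATA_ODS', 'CSDB_DEBT_DAILY_DATA_ODS',
                      'CSDB_DEBT_ARCHIVE', 'CSDB_DEBT_DAILY_ARCHIVE',
                      'CSDB_INSTR_RAT_FULL_ARCHIVE', 'CSDB_INSTR_DESC_FULL_ARCHIVE',
                      'CSDB_ISSUER_RAT_FULL_ARCHIVE', 'CSDB_ISSUER_DESC_FULL_ARCHIVE');

-- 2. CSV tables have A_WORKFLOW_HISTORY_KEY at position 2
SELECT table_name, column_id
FROM   all_tab_columns
WHERE  owner = 'ODS'
AND    table_name IN ('CSDB_DEBT_DATA_ODS', 'CSDB_DEBT_DAILY_DATA_ODS')
AND    column_name = 'A_WORKFLOW_HISTORY_KEY';

-- 3. MRDS_LOADER can execute ODS.FILE_MANAGER_ODS (direct grant assumed)
SELECT grantee, privilege
FROM   all_tab_privs
WHERE  table_schema = 'ODS'
AND    table_name = 'FILE_MANAGER_ODS'
AND    grantee = 'MRDS_LOADER';
```
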
---

## Testing Verification

After export, verify A_WORKFLOW_HISTORY_KEY is not NULL:

```sql
-- CSV tables (should be 100% populated)
SELECT 'CSDB_DEBT_DATA_ODS' AS TABLE_NAME,
       COUNT(*) AS TOTAL_ROWS,
       COUNT(A_WORKFLOW_HISTORY_KEY) AS NON_NULL_COUNT,
       ROUND(COUNT(A_WORKFLOW_HISTORY_KEY) * 100.0 / NULLIF(COUNT(*), 0), 2) AS SUCCESS_RATE_PCT
FROM ODS.CSDB_DEBT_DATA_ODS;

SELECT 'CSDB_DEBT_DAILY_DATA_ODS' AS TABLE_NAME,
       COUNT(*) AS TOTAL_ROWS,
       COUNT(A_WORKFLOW_HISTORY_KEY) AS NON_NULL_COUNT,
       ROUND(COUNT(A_WORKFLOW_HISTORY_KEY) * 100.0 / NULLIF(COUNT(*), 0), 2) AS SUCCESS_RATE_PCT
FROM ODS.CSDB_DEBT_DAILY_DATA_ODS;

-- Parquet tables (should also be 100% populated)
SELECT 'CSDB_DEBT_ARCHIVE' AS TABLE_NAME,
       COUNT(*) AS TOTAL_ROWS,
       COUNT(A_WORKFLOW_HISTORY_KEY) AS NON_NULL_COUNT,
       ROUND(COUNT(A_WORKFLOW_HISTORY_KEY) * 100.0 / NULLIF(COUNT(*), 0), 2) AS SUCCESS_RATE_PCT
FROM ODS.CSDB_DEBT_ARCHIVE;
```

**Expected Result**: SUCCESS_RATE_PCT = 100.00 for all tables

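If the rate comes back below 100%, a quick follow-up (a sketch only, using one of the tables named above) is to look at which workflow keys actually arrived and in what volume; a NULL group in the output shows how many rows missed the mapping:

```sql
-- Sketch: distribution of populated workflow keys in one exported table
SELECT A_WORKFLOW_HISTORY_KEY,
       COUNT(*) AS ROW_COUNT
FROM ODS.CSDB_DEBT_DATA_ODS
GROUP BY A_WORKFLOW_HISTORY_KEY
ORDER BY A_WORKFLOW_HISTORY_KEY;
```
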
---

## Related Documentation

- [DATA_EXPORTER v2.4.0 Smart Column Mapping Examples](../MARS-835-PREHOOK/current_version/v2.3.0/DATA_EXPORTER_v2.4.0_Smart_Column_Mapping_Examples.sql)
- [Oracle External Tables Column Order Issue](../../confluence/additions/Oracle_External_Tables_Column_Order_Issue.md)
- [MARS-835 README](README.md)

---

**Last Updated**: 2026-01-09
**Author**: GitHub Copilot (MARS-835 Update)
@@ -59,7 +59,13 @@ PROMPT =========================================================================

PROMPT
PROMPT =========================================================================
PROMPT Step 3: Verify Rollback Completed
PROMPT Step 3: Delete File Registration Records from A_SOURCE_FILE_RECEIVED
PROMPT =========================================================================
@@90_MARS_835_rollback_file_registrations.sql

PROMPT
PROMPT =========================================================================
PROMPT Step 4: Verify Rollback Completed
PROMPT =========================================================================
@@99_MARS_835_verify_rollback.sql

@@ -1,92 +0,0 @@
-- ===================================================================
-- Simple Package Version Tracking Script
-- ===================================================================
-- Purpose: Track specified Oracle package versions
-- Author: Grzegorz Michalski
-- Date: 2025-12-04
-- Version: 3.1.0 - List-Based Edition
--
-- USAGE:
--   1. Edit package list below (add/remove packages as needed)
--   2. Include in your install/rollback script: @@track_package_versions.sql
-- ===================================================================

SET SERVEROUTPUT ON;

DECLARE
    TYPE t_package_rec IS RECORD (
        owner   VARCHAR2(50),
        name    VARCHAR2(50),
        version VARCHAR2(50)
    );
    TYPE t_packages IS TABLE OF t_package_rec;
    TYPE t_string_array IS TABLE OF VARCHAR2(100);

    -- ===================================================================
    -- PACKAGE LIST - Edit this array to specify packages to track
    -- ===================================================================
    -- Add or remove entries as needed for your MARS issue
    -- Format: 'SCHEMA.PACKAGE_NAME'
    -- ===================================================================
    vPackageList t_string_array := t_string_array(
        'CT_MRDS.FILE_MANAGER',
        'ODS.FILE_MANAGER_ODS'
    );
    -- ===================================================================

    vPackages    t_packages := t_packages();
    vVersion     VARCHAR2(50);
    vCount       NUMBER := 0;
    vOwner       VARCHAR2(50);
    vPackageName VARCHAR2(50);
    vDotPos      NUMBER;
BEGIN
    DBMS_OUTPUT.PUT_LINE('========================================');
    DBMS_OUTPUT.PUT_LINE('Package Version Tracking');
    DBMS_OUTPUT.PUT_LINE('========================================');

    -- Process each package in the list
    FOR i IN 1..vPackageList.COUNT LOOP
        vDotPos := INSTR(vPackageList(i), '.');
        IF vDotPos > 0 THEN
            vOwner       := SUBSTR(vPackageList(i), 1, vDotPos - 1);
            vPackageName := SUBSTR(vPackageList(i), vDotPos + 1);

            BEGIN
                EXECUTE IMMEDIATE 'SELECT ' || vPackageList(i) || '.GET_VERSION() FROM DUAL'
                    INTO vVersion;

                vPackages.EXTEND;
                vPackages(vPackages.COUNT).owner   := vOwner;
                vPackages(vPackages.COUNT).name    := vPackageName;
                vPackages(vPackages.COUNT).version := vVersion;

                CT_MRDS.ENV_MANAGER.TRACK_PACKAGE_VERSION(
                    pPackageOwner     => vOwner,
                    pPackageName      => vPackageName,
                    pPackageVersion   => vVersion,
                    pPackageBuildDate => TO_CHAR(SYSDATE, 'YYYY-MM-DD HH24:MI:SS'),
                    pPackageAuthor    => 'Grzegorz Michalski'
                );
                vCount := vCount + 1;
            EXCEPTION
                WHEN OTHERS THEN
                    DBMS_OUTPUT.PUT_LINE('Error tracking ' || vPackageList(i) || ': ' || SQLERRM);
            END;
        END IF;
    END LOOP;

    -- Display results
    IF vPackages.COUNT > 0 THEN
        DBMS_OUTPUT.PUT_LINE('Packages tracked: ' || vCount || ' of ' || vPackages.COUNT);
        FOR i IN 1..vPackages.COUNT LOOP
            DBMS_OUTPUT.PUT_LINE('  ' || vPackages(i).owner || '.' || vPackages(i).name ||
                                 ' (v' || vPackages(i).version || ')');
        END LOOP;
    ELSE
        DBMS_OUTPUT.PUT_LINE('No packages found in list');
    END IF;

    DBMS_OUTPUT.PUT_LINE('========================================');
END;
/
@@ -1,62 +0,0 @@
-- ===================================================================
-- Universal Package Version Verification Script
-- ===================================================================
-- Purpose: Verify all tracked Oracle packages for code changes
-- Author: Grzegorz Michalski
-- Date: 2025-12-04
-- Version: 1.0.0
--
-- USAGE:
--   Include at the end of install/rollback scripts: @@verify_packages_version.sql
--
-- OUTPUT:
--   - List of all tracked packages with their current status
--   - OK: Package has not changed since last tracking
--   - WARNING: Package code changed without version update
-- ===================================================================

SET LINESIZE 200
SET PAGESIZE 1000
SET FEEDBACK OFF

PROMPT
PROMPT ========================================
PROMPT Package Version Verification
PROMPT ========================================
PROMPT

COLUMN PACKAGE_OWNER FORMAT A15
COLUMN PACKAGE_NAME FORMAT A20
COLUMN VERSION FORMAT A10
COLUMN STATUS FORMAT A80

SELECT
    PACKAGE_OWNER,
    PACKAGE_NAME,
    PACKAGE_VERSION AS VERSION,
    CT_MRDS.ENV_MANAGER.CHECK_PACKAGE_CHANGES(PACKAGE_OWNER, PACKAGE_NAME) AS STATUS
FROM (
    SELECT
        PACKAGE_OWNER,
        PACKAGE_NAME,
        PACKAGE_VERSION,
        ROW_NUMBER() OVER (PARTITION BY PACKAGE_OWNER, PACKAGE_NAME ORDER BY TRACKING_DATE DESC) AS RN
    FROM CT_MRDS.A_PACKAGE_VERSION_TRACKING
)
WHERE RN = 1
ORDER BY PACKAGE_OWNER, PACKAGE_NAME;

PROMPT
PROMPT ========================================
PROMPT Verification Complete
PROMPT ========================================
PROMPT
PROMPT Legend:
PROMPT   OK      - Package has not changed since last tracking
PROMPT   WARNING - Package code changed without version update
PROMPT
PROMPT For detailed hash information, use:
PROMPT   SELECT ENV_MANAGER.GET_PACKAGE_HASH_INFO('OWNER', 'PACKAGE') FROM DUAL;
PROMPT ========================================

SET FEEDBACK ON