Przeniesienie
This commit is contained in:
5
MARS_Packages/REL01_ADDITIONS/MARS-835/.gitignore
vendored
Normal file
5
MARS_Packages/REL01_ADDITIONS/MARS-835/.gitignore
vendored
Normal file
@@ -0,0 +1,5 @@
# Exclude temporary folders from version control
confluence/
log/
test/
mock_data/
@@ -0,0 +1,176 @@
|
||||
-- =====================================================================================
-- Script: 00_MARS_835_pre_check_existing_files.sql
-- Purpose: Display existing archive files in DATA and HIST buckets before export
-- Author: Grzegorz Michalski
-- Created: 2025-12-17
-- MARS Issue: MARS-835
-- Target Locations: mrds_data_dev/ODS/CSDB/, mrds_hist_dev/ARCHIVE/CSDB/
-- =====================================================================================

-- SQL*Plus session settings: unlimited DBMS_OUTPUT buffer so long file listings
-- are not truncated; VERIFY OFF suppresses substitution-variable echo.
SET SERVEROUTPUT ON SIZE UNLIMITED;
SET FEEDBACK ON;
SET VERIFY OFF;
SET LINESIZE 200;

PROMPT =====================================================================================
PROMPT MARS-835 Pre-Check: Listing existing CSDB files in DATA and HIST buckets
PROMPT =====================================================================================
|
||||
|
||||
-- Anonymous block: lists any pre-existing CSDB export files in both buckets so
-- the operator can judge whether the export environment is clean before running
-- the MARS-835 export scripts. Read-only: it only calls DBMS_CLOUD.LIST_OBJECTS
-- and prints via DBMS_OUTPUT.
DECLARE
    -- Bucket URIs and credential are resolved from the project configuration
    -- packages (FILE_MANAGER / ENV_MANAGER) so the script works unchanged in
    -- every environment.
    vDataBucketUri VARCHAR2(500);
    vHistBucketUri VARCHAR2(500);
    vCredentialName VARCHAR2(100);
    vFileCount NUMBER := 0;      -- files found in the folder currently being checked
    vTotalFiles NUMBER := 0;     -- running total across all folders

    TYPE t_folder_list IS TABLE OF VARCHAR2(200);
    vDataFolders t_folder_list;  -- DATA-bucket folders (CSV exports)
    vHistFolders t_folder_list;  -- HIST-bucket folders (Parquet archives)
BEGIN
    -- Get bucket URIs and credential from FILE_MANAGER configuration
    vDataBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('DATA');
    vHistBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
    vCredentialName := CT_MRDS.ENV_MANAGER.gvCredentialName;

    DBMS_OUTPUT.PUT_LINE('CHECK TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
    DBMS_OUTPUT.PUT_LINE('DATA Bucket URI: ' || vDataBucketUri);
    DBMS_OUTPUT.PUT_LINE('HIST Bucket URI: ' || vHistBucketUri);
    DBMS_OUTPUT.PUT_LINE('Credential: ' || vCredentialName);
    DBMS_OUTPUT.PUT_LINE('');

    -- Initialize folder lists for CSDB tables
    -- DATA bucket folders (CSV format - only DEBT and DEBT_DAILY)
    vDataFolders := t_folder_list(
        'ODS/CSDB/CSDB_DEBT/',
        'ODS/CSDB/CSDB_DEBT_DAILY/'
    );

    -- HIST bucket folders (Parquet format - all 6 tables)
    vHistFolders := t_folder_list(
        'ARCHIVE/CSDB/CSDB_DEBT/',
        'ARCHIVE/CSDB/CSDB_DEBT_DAILY/',
        'ARCHIVE/CSDB/CSDB_INSTR_RAT_FULL/',
        'ARCHIVE/CSDB/CSDB_INSTR_DESC_FULL/',
        'ARCHIVE/CSDB/CSDB_ISSUER_RAT_FULL/',
        'ARCHIVE/CSDB/CSDB_ISSUER_DESC_FULL/'
    );

    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    DBMS_OUTPUT.PUT_LINE('Checking DATA Bucket (CSV format - last 6 months)');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');

    -- Check DATA bucket folders: every CSV file is listed (no row cap here,
    -- unlike the HIST loop below) because DATA folders are expected to be small.
    FOR i IN 1..vDataFolders.COUNT LOOP
        vFileCount := 0;

        DBMS_OUTPUT.PUT_LINE('');
        DBMS_OUTPUT.PUT_LINE('Folder: ' || vDataFolders(i));
        DBMS_OUTPUT.PUT_LINE('-------------------------------------------------------------------------------------');

        BEGIN
            FOR rec IN (
                SELECT object_name, bytes, TO_CHAR(created, 'YYYY-MM-DD HH24:MI:SS') AS created_date
                FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
                    credential_name => vCredentialName,
                    location_uri => vDataBucketUri || vDataFolders(i)
                ))
                WHERE object_name LIKE '%.csv'
                ORDER BY created DESC
            ) LOOP
                vFileCount := vFileCount + 1;
                vTotalFiles := vTotalFiles + 1;
                DBMS_OUTPUT.PUT_LINE(' [' || vFileCount || '] ' || rec.object_name ||
                    ' (' || ROUND(rec.bytes/1024/1024, 2) || ' MB) - ' || rec.created_date);
            END LOOP;

            IF vFileCount = 0 THEN
                DBMS_OUTPUT.PUT_LINE(' No CSV files found (OK - clean folder)');
            ELSE
                DBMS_OUTPUT.PUT_LINE(' >> WARNING: Found ' || vFileCount || ' existing CSV files');
            END IF;
        EXCEPTION
            WHEN OTHERS THEN
                -- NOTE(review): -20404 is assumed to be the DBMS_CLOUD
                -- "object/folder not found" error - confirm against the
                -- DBMS_CLOUD error range for this DB version.
                IF SQLCODE = -20404 THEN
                    DBMS_OUTPUT.PUT_LINE(' Folder does not exist (OK - will be created during export)');
                ELSE
                    DBMS_OUTPUT.PUT_LINE(' ERROR checking folder: ' || SQLERRM);
                END IF;
        END;
    END LOOP;

    DBMS_OUTPUT.PUT_LINE('');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    DBMS_OUTPUT.PUT_LINE('Checking HIST Bucket (Parquet format with Hive partitioning)');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');

    -- Check HIST bucket folders: listing is capped at the 5 newest Parquet
    -- files per folder (FETCH FIRST) to keep the output readable; vTotalFiles
    -- therefore undercounts HIST contents, which the summary text acknowledges.
    FOR i IN 1..vHistFolders.COUNT LOOP
        vFileCount := 0;

        DBMS_OUTPUT.PUT_LINE('');
        DBMS_OUTPUT.PUT_LINE('Folder: ' || vHistFolders(i));
        DBMS_OUTPUT.PUT_LINE('-------------------------------------------------------------------------------------');

        BEGIN
            FOR rec IN (
                SELECT object_name, bytes, TO_CHAR(created, 'YYYY-MM-DD HH24:MI:SS') AS created_date
                FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
                    credential_name => vCredentialName,
                    location_uri => vHistBucketUri || vHistFolders(i)
                ))
                WHERE object_name LIKE '%.parquet'
                ORDER BY created DESC
                FETCH FIRST 5 ROWS ONLY
            ) LOOP
                vFileCount := vFileCount + 1;
                vTotalFiles := vTotalFiles + 1;
                DBMS_OUTPUT.PUT_LINE(' [' || vFileCount || '] ' || rec.object_name ||
                    ' (' || ROUND(rec.bytes/1024/1024, 2) || ' MB) - ' || rec.created_date);
            END LOOP;

            IF vFileCount = 0 THEN
                DBMS_OUTPUT.PUT_LINE(' No Parquet files found (OK - clean folder)');
            ELSE
                DBMS_OUTPUT.PUT_LINE(' >> WARNING: Found Parquet files (showing first 5)');
                DBMS_OUTPUT.PUT_LINE(' >> Total files in folder: Check bucket manually for complete count');
            END IF;
        EXCEPTION
            WHEN OTHERS THEN
                -- Same -20404 assumption as the DATA loop above.
                IF SQLCODE = -20404 THEN
                    DBMS_OUTPUT.PUT_LINE(' Folder does not exist (OK - will be created during export)');
                ELSE
                    DBMS_OUTPUT.PUT_LINE(' ERROR checking folder: ' || SQLERRM);
                END IF;
        END;
    END LOOP;

    DBMS_OUTPUT.PUT_LINE('');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    DBMS_OUTPUT.PUT_LINE('Pre-Check Summary');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    DBMS_OUTPUT.PUT_LINE('Total files found: ' || vTotalFiles);

    IF vTotalFiles > 0 THEN
        DBMS_OUTPUT.PUT_LINE('');
        DBMS_OUTPUT.PUT_LINE('INFO: Existing files detected in buckets');
        DBMS_OUTPUT.PUT_LINE(' Export will ADD new files to existing ones.');
        DBMS_OUTPUT.PUT_LINE(' NOTE: Rollback will NOT delete pre-existing files.');
        DBMS_OUTPUT.PUT_LINE(' Record count verification may show MISMATCH due to pre-existing data.');
    ELSE
        DBMS_OUTPUT.PUT_LINE('');
        DBMS_OUTPUT.PUT_LINE('OK: No existing files found - clean export environment');
    END IF;

    DBMS_OUTPUT.PUT_LINE('=====================================================================================');

EXCEPTION
    WHEN OTHERS THEN
        -- Unexpected failure (e.g. configuration packages unavailable):
        -- report and re-raise so the calling script/CI sees the failure.
        DBMS_OUTPUT.PUT_LINE('ERROR during pre-check: ' || SQLERRM);
        DBMS_OUTPUT.PUT_LINE('SQLCODE: ' || SQLCODE);
        RAISE;
END;
/

PROMPT
PROMPT Pre-check completed
PROMPT
|
||||
@@ -0,0 +1,265 @@
|
||||
--=============================================================================================================================
-- MARS-835: Export Group 1 - Split DATA + HIST (DEBT, DEBT_DAILY)
--=============================================================================================================================
-- Purpose: Export last 6 months to DATA bucket (CSV), older data to HIST bucket (Parquet)
--          Applies column mapping: A_ETL_LOAD_SET_FK to A_WORKFLOW_HISTORY_KEY
--          Excludes legacy columns not required in new structure
-- USES: DATA_EXPORTER v2.4.0 with pTemplateTableName for column order and date formats
-- Author: Grzegorz Michalski
-- Date: 2025-12-17
-- Updated: 2026-01-11 (Updated to DATA_EXPORTER v2.4.0 with pTemplateTableName)
-- Related: MARS-835 - CSDB Data Export
--=============================================================================================================================

SET SERVEROUTPUT ON SIZE UNLIMITED
SET TIMING ON

-- Substitution variable: first day of the month six months back. Used as the
-- DATA/HIST split point - data >= cutoff goes to DATA (CSV), data < cutoff to
-- HIST (Parquet). NOTE(review): whether DATA_EXPORTER treats pMinDate/pMaxDate
-- as inclusive or exclusive decides if the cutoff row lands in one bucket,
-- both, or neither - confirm against DATA_EXPORTER v2.4.0 semantics.
DEFINE cutoff_date = "TRUNC(ADD_MONTHS(SYSDATE, -6), 'MM')"

PROMPT ========================================================================
PROMPT Exporting CSDB.DEBT - Split DATA + HIST
PROMPT ========================================================================
PROMPT Last 6 months to DATA bucket (CSV format)
PROMPT Older data to HIST bucket (Parquet with partitioning)
PROMPT Column mapping: A_ETL_LOAD_SET_FK to A_WORKFLOW_HISTORY_KEY
PROMPT Excluded columns: IDIRDEPOSITORY, VA_BONDDURATION
PROMPT ========================================================================
||||
|
||||
-- PRE-EXPORT CHECK: List existing files and count records
|
||||
-- PRE-EXPORT CHECK: list any files already present in the DATA bucket folder
-- for CSDB_DEBT and, if found, count the records currently readable through the
-- external table, so pre-existing data is recorded in the run log before the
-- export adds new files. Read-only; prints via DBMS_OUTPUT only.
DECLARE
    vFileCount NUMBER := 0;        -- non-directory objects found in the folder
    vRecordCount NUMBER := 0;      -- rows readable via the external table
    vLocationUri VARCHAR2(1000);   -- full object-store URI of the folder
    -- Fix: resolve the credential from ENV_MANAGER instead of hard-coding
    -- 'OCI$RESOURCE_PRINCIPAL' (which the original did in two places). This
    -- makes the script consistent with the companion MARS-835 pre/post-check
    -- scripts and keeps it working in environments that use a named credential.
    vCredentialName VARCHAR2(100) := CT_MRDS.ENV_MANAGER.gvCredentialName;
BEGIN
    -- Get bucket URI for DATA bucket
    vLocationUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('DATA') || 'ODS/CSDB/CSDB_DEBT/';

    -- Count existing files
    SELECT COUNT(*)
    INTO vFileCount
    FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
        credential_name => vCredentialName,
        location_uri => vLocationUri
    ))
    WHERE object_name NOT LIKE '%/'; -- Exclude directories

    IF vFileCount > 0 THEN
        DBMS_OUTPUT.PUT_LINE('===============================================================================');
        DBMS_OUTPUT.PUT_LINE('PRE-EXPORT CHECK: Files already exist in DATA bucket');
        DBMS_OUTPUT.PUT_LINE('===============================================================================');
        DBMS_OUTPUT.PUT_LINE('Location: ' || vLocationUri);
        DBMS_OUTPUT.PUT_LINE('Files found: ' || vFileCount);
        DBMS_OUTPUT.PUT_LINE('');

        -- List existing files
        DBMS_OUTPUT.PUT_LINE('Existing files:');
        FOR rec IN (
            SELECT object_name, bytes, TO_CHAR(last_modified, 'YYYY-MM-DD HH24:MI:SS') AS modified
            FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
                credential_name => vCredentialName,
                location_uri => vLocationUri
            ))
            WHERE object_name NOT LIKE '%/'
            ORDER BY object_name
        ) LOOP
            DBMS_OUTPUT.PUT_LINE(' - ' || rec.object_name || ' (' || rec.bytes || ' bytes, ' || rec.modified || ')');
        END LOOP;

        -- Count records in external table; tolerate failure (the external
        -- table may not exist yet) with a warning instead of aborting.
        BEGIN
            EXECUTE IMMEDIATE 'SELECT COUNT(*) FROM ODS.CSDB_DEBT_ODS' INTO vRecordCount;
            DBMS_OUTPUT.PUT_LINE('');
            DBMS_OUTPUT.PUT_LINE('-------------------------------------------------------------------------------');
            DBMS_OUTPUT.PUT_LINE('>>>');
            DBMS_OUTPUT.PUT_LINE('>>> Records currently readable via external table: ' || vRecordCount);
            DBMS_OUTPUT.PUT_LINE('>>>');
            DBMS_OUTPUT.PUT_LINE('-------------------------------------------------------------------------------');
        EXCEPTION
            WHEN OTHERS THEN
                DBMS_OUTPUT.PUT_LINE('');
                DBMS_OUTPUT.PUT_LINE('WARNING: Cannot count records in external table');
                DBMS_OUTPUT.PUT_LINE('Error: ' || SQLERRM);
        END;

        DBMS_OUTPUT.PUT_LINE('===============================================================================');
        DBMS_OUTPUT.PUT_LINE('');
    ELSE
        DBMS_OUTPUT.PUT_LINE('PRE-EXPORT CHECK: No existing files found in DATA bucket - bucket is clean');
        DBMS_OUTPUT.PUT_LINE('');
    END IF;
END;
/
|
||||
|
||||
-- Export recent data to DATA bucket (CSV)
-- NEW v2.4.0: Per-column date format handling with template table for column order
-- Exports OU_CSDB.LEGACY_DEBT rows with A_ETL_LOAD_SET_FK between the cutoff
-- and SYSDATE into ODS/CSDB/CSDB_DEBT as CSV; the template table drives column
-- order/formats. Raises on failure (no local handler) so SQL*Plus reports it.
BEGIN
    DBMS_OUTPUT.PUT_LINE('Exporting LEGACY_DEBT data to DATA bucket (last 6 months)...');
    DBMS_OUTPUT.PUT_LINE('Using Template Table: CT_ET_TEMPLATES.CSDB_DEBT');

    CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
        pSchemaName => 'OU_CSDB',
        pTableName => 'LEGACY_DEBT',
        pKeyColumnName => 'A_ETL_LOAD_SET_FK',
        pBucketArea => 'DATA',
        pFolderName => 'ODS/CSDB/CSDB_DEBT',
        pMinDate => &cutoff_date,
        pMaxDate => SYSDATE,
        pParallelDegree => 16,
        pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_DEBT',
        pMaxFileSize => 104857600 -- 100MB in bytes (safe for parallel execution, avoids ORA-04036)
    );

    DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_DEBT exported to DATA bucket with template column order');
END;
/
|
||||
|
||||
-- Export historical data to HIST bucket (Parquet)
-- NEW v2.4.0: Per-column date format handling with template table
-- Exports OU_CSDB.LEGACY_DEBT rows older than the cutoff (no pMinDate, so
-- presumably everything up to pMaxDate - confirm DATA_EXPORTER default) into
-- ARCHIVE/CSDB/CSDB_DEBT as Parquet.
BEGIN
    DBMS_OUTPUT.PUT_LINE('Exporting LEGACY_DEBT data to HIST bucket (older than 6 months)...');
    DBMS_OUTPUT.PUT_LINE('Using Template Table: CT_ET_TEMPLATES.CSDB_DEBT');

    CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
        pSchemaName => 'OU_CSDB',
        pTableName => 'LEGACY_DEBT',
        pKeyColumnName => 'A_ETL_LOAD_SET_FK',
        pBucketArea => 'ARCHIVE',
        pFolderName => 'ARCHIVE/CSDB/CSDB_DEBT',
        pMaxDate => &cutoff_date,
        pParallelDegree => 16,
        pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_DEBT'
    );

    DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_DEBT exported to HIST bucket with template column order');
END;
/
|
||||
|
||||
-- Section banner for the LEGACY_DEBT_DAILY split export (same DATA/HIST split
-- as LEGACY_DEBT above, different excluded-column set).
PROMPT ========================================================================
PROMPT Exporting CSDB.LEGACY_DEBT_DAILY - Split DATA + HIST
PROMPT ========================================================================
PROMPT Last 6 months to DATA bucket (CSV format)
PROMPT Older data to HIST bucket (Parquet with partitioning)
PROMPT Column mapping: A_ETL_LOAD_SET_FK to A_WORKFLOW_HISTORY_KEY
PROMPT Excluded columns: STEPID, PROGRAMNAME, PROGRAMCEILING, PROGRAMSTATUS,
PROMPT ISSUERNACE21SECTOR, INSTRUMENTQUOTATIONBASIS
PROMPT ========================================================================
|
||||
|
||||
-- PRE-EXPORT CHECK: list any files already present in the DATA bucket folder
-- for CSDB_DEBT_DAILY and, if found, count the records currently readable via
-- the external table, so pre-existing data is recorded before the export adds
-- new files. Read-only; prints via DBMS_OUTPUT only.
DECLARE
    vFileCount NUMBER := 0;        -- non-directory objects found in the folder
    vRecordCount NUMBER := 0;      -- rows readable via the external table
    vLocationUri VARCHAR2(1000);   -- full object-store URI of the folder
    -- Fix: resolve the credential from ENV_MANAGER instead of hard-coding
    -- 'OCI$RESOURCE_PRINCIPAL' (original had the literal in two places),
    -- matching the companion MARS-835 pre/post-check scripts.
    vCredentialName VARCHAR2(100) := CT_MRDS.ENV_MANAGER.gvCredentialName;
BEGIN
    -- Get bucket URI for DATA bucket
    vLocationUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('DATA') || 'ODS/CSDB/CSDB_DEBT_DAILY/';

    -- Count existing files
    SELECT COUNT(*)
    INTO vFileCount
    FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
        credential_name => vCredentialName,
        location_uri => vLocationUri
    ))
    WHERE object_name NOT LIKE '%/'; -- Exclude directories

    IF vFileCount > 0 THEN
        DBMS_OUTPUT.PUT_LINE('===============================================================================');
        DBMS_OUTPUT.PUT_LINE('PRE-EXPORT CHECK: Files already exist in DATA bucket');
        DBMS_OUTPUT.PUT_LINE('===============================================================================');
        DBMS_OUTPUT.PUT_LINE('Location: ' || vLocationUri);
        DBMS_OUTPUT.PUT_LINE('Files found: ' || vFileCount);
        DBMS_OUTPUT.PUT_LINE('');

        -- List existing files
        DBMS_OUTPUT.PUT_LINE('Existing files:');
        FOR rec IN (
            SELECT object_name, bytes, TO_CHAR(last_modified, 'YYYY-MM-DD HH24:MI:SS') AS modified
            FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
                credential_name => vCredentialName,
                location_uri => vLocationUri
            ))
            WHERE object_name NOT LIKE '%/'
            ORDER BY object_name
        ) LOOP
            DBMS_OUTPUT.PUT_LINE(' - ' || rec.object_name || ' (' || rec.bytes || ' bytes, ' || rec.modified || ')');
        END LOOP;

        -- Count records in external table; tolerate failure (the external
        -- table may not exist yet) with a warning instead of aborting.
        BEGIN
            EXECUTE IMMEDIATE 'SELECT COUNT(*) FROM ODS.CSDB_DEBT_DAILY_ODS' INTO vRecordCount;
            DBMS_OUTPUT.PUT_LINE('');
            DBMS_OUTPUT.PUT_LINE('-------------------------------------------------------------------------------');
            DBMS_OUTPUT.PUT_LINE('>>>');
            DBMS_OUTPUT.PUT_LINE('>>> Records currently readable via external table: ' || vRecordCount);
            DBMS_OUTPUT.PUT_LINE('>>>');
            DBMS_OUTPUT.PUT_LINE('-------------------------------------------------------------------------------');
        EXCEPTION
            WHEN OTHERS THEN
                DBMS_OUTPUT.PUT_LINE('');
                DBMS_OUTPUT.PUT_LINE('WARNING: Cannot count records in external table');
                DBMS_OUTPUT.PUT_LINE('Error: ' || SQLERRM);
        END;

        DBMS_OUTPUT.PUT_LINE('===============================================================================');
        DBMS_OUTPUT.PUT_LINE('');
    ELSE
        DBMS_OUTPUT.PUT_LINE('PRE-EXPORT CHECK: No existing files found in DATA bucket - bucket is clean');
        DBMS_OUTPUT.PUT_LINE('');
    END IF;
END;
/
|
||||
|
||||
-- Export recent data to DATA bucket (CSV)
-- NEW v2.4.0: Per-column date format handling with template table for column order
-- Exports OU_CSDB.LEGACY_DEBT_DAILY rows with A_ETL_LOAD_SET_FK between the
-- cutoff and SYSDATE into ODS/CSDB/CSDB_DEBT_DAILY as CSV.
BEGIN
    DBMS_OUTPUT.PUT_LINE('Exporting LEGACY_DEBT_DAILY data to DATA bucket (last 6 months)...');
    DBMS_OUTPUT.PUT_LINE('Using Template Table: CT_ET_TEMPLATES.CSDB_DEBT_DAILY');

    CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
        pSchemaName => 'OU_CSDB',
        pTableName => 'LEGACY_DEBT_DAILY',
        pKeyColumnName => 'A_ETL_LOAD_SET_FK',
        pBucketArea => 'DATA',
        pFolderName => 'ODS/CSDB/CSDB_DEBT_DAILY',
        pMinDate => &cutoff_date,
        pMaxDate => SYSDATE,
        pParallelDegree => 16,
        pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_DEBT_DAILY',
        pMaxFileSize => 104857600 -- 100MB in bytes (safe for parallel execution, avoids ORA-04036)
    );

    DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_DEBT_DAILY exported to DATA bucket with template column order');
END;
/
|
||||
|
||||
-- Export historical data to HIST bucket (Parquet)
-- NEW v2.4.0: Per-column date format handling with template table
-- Exports OU_CSDB.LEGACY_DEBT_DAILY rows older than the cutoff into
-- ARCHIVE/CSDB/CSDB_DEBT_DAILY as Parquet.
BEGIN
    DBMS_OUTPUT.PUT_LINE('Exporting LEGACY_DEBT_DAILY data to HIST bucket (older than 6 months)...');
    DBMS_OUTPUT.PUT_LINE('Using Template Table: CT_ET_TEMPLATES.CSDB_DEBT_DAILY');

    CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
        pSchemaName => 'OU_CSDB',
        pTableName => 'LEGACY_DEBT_DAILY',
        pKeyColumnName => 'A_ETL_LOAD_SET_FK',
        pBucketArea => 'ARCHIVE',
        pFolderName => 'ARCHIVE/CSDB/CSDB_DEBT_DAILY',
        pMaxDate => &cutoff_date,
        pParallelDegree => 16,
        pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_DEBT_DAILY'
    );

    DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_DEBT_DAILY exported to HIST bucket with template column order');
END;
/
|
||||
|
||||
-- Final status banner for Export Group 1.
PROMPT ========================================================================
PROMPT Group 1 Export Completed
PROMPT ========================================================================
PROMPT - LEGACY_DEBT: DATA + HIST exported
PROMPT - LEGACY_DEBT_DAILY: DATA + HIST exported
PROMPT ========================================================================

--=============================================================================================================================
-- End of Script
--=============================================================================================================================
|
||||
@@ -0,0 +1,137 @@
|
||||
--=============================================================================================================================
-- MARS-835: Export Group 2 - HIST Only (4 tables)
--=============================================================================================================================
-- Purpose: Export all data to HIST bucket (Parquet with Hive partitioning)
-- Tables: INSTR_RAT_FULL, INSTR_DESC_FULL, ISSUER_RAT_FULL, ISSUER_DESC_FULL
--         Applies column mapping: A_ETL_LOAD_SET_FK to A_WORKFLOW_HISTORY_KEY
--         No column exclusions for these tables
-- USES: DATA_EXPORTER v2.4.0 with per-column date format handling
-- Author: Grzegorz Michalski
-- Date: 2025-12-17
-- Updated: 2026-01-11 (Updated to DATA_EXPORTER v2.4.0 with pTemplateTableName)
-- Related: MARS-835 - CSDB Data Export
--=============================================================================================================================

SET SERVEROUTPUT ON SIZE UNLIMITED
SET TIMING ON

PROMPT ========================================================================
PROMPT Exporting CSDB.LEGACY_INSTR_RAT_FULL - HIST Only
PROMPT ========================================================================
PROMPT All data to HIST bucket (Parquet with partitioning)
PROMPT Column mapping: A_ETL_LOAD_SET_FK to A_WORKFLOW_HISTORY_KEY
PROMPT ========================================================================
|
||||
|
||||
-- NEW v2.4.0: Per-column date format handling via pTemplateTableName
-- HIST-only export: everything up to SYSDATE (i.e. the full table) goes to
-- ARCHIVE/CSDB/CSDB_INSTR_RAT_FULL as Parquet. Lower parallel degree (8) than
-- the Group 1 exports - presumably because these tables are smaller; confirm.
BEGIN
    DBMS_OUTPUT.PUT_LINE('Exporting LEGACY_INSTR_RAT_FULL data to HIST bucket...');
    DBMS_OUTPUT.PUT_LINE('Using Template Table: CT_ET_TEMPLATES.CSDB_INSTR_RAT_FULL');

    CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
        pSchemaName => 'OU_CSDB',
        pTableName => 'LEGACY_INSTR_RAT_FULL',
        pKeyColumnName => 'A_ETL_LOAD_SET_FK',
        pBucketArea => 'ARCHIVE',
        pFolderName => 'ARCHIVE/CSDB/CSDB_INSTR_RAT_FULL',
        pMaxDate => SYSDATE,
        pParallelDegree => 8,
        pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_INSTR_RAT_FULL'
    );

    DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_INSTR_RAT_FULL exported to HIST bucket with template column order');
END;
/
|
||||
|
||||
PROMPT ========================================================================
PROMPT Exporting CSDB.LEGACY_INSTR_DESC_FULL - HIST Only
PROMPT ========================================================================
PROMPT All data to HIST bucket (Parquet with partitioning)
PROMPT Column mapping: A_ETL_LOAD_SET_FK to A_WORKFLOW_HISTORY_KEY
PROMPT ========================================================================

-- NEW v2.4.0: Per-column date format handling via pTemplateTableName
-- HIST-only full export of LEGACY_INSTR_DESC_FULL (same pattern as
-- LEGACY_INSTR_RAT_FULL above).
BEGIN
    DBMS_OUTPUT.PUT_LINE('Exporting LEGACY_INSTR_DESC_FULL data to HIST bucket...');
    DBMS_OUTPUT.PUT_LINE('Using Template Table: CT_ET_TEMPLATES.CSDB_INSTR_DESC_FULL');

    CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
        pSchemaName => 'OU_CSDB',
        pTableName => 'LEGACY_INSTR_DESC_FULL',
        pKeyColumnName => 'A_ETL_LOAD_SET_FK',
        pBucketArea => 'ARCHIVE',
        pFolderName => 'ARCHIVE/CSDB/CSDB_INSTR_DESC_FULL',
        pMaxDate => SYSDATE,
        pParallelDegree => 8,
        pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_INSTR_DESC_FULL'
    );

    DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_INSTR_DESC_FULL exported to HIST bucket with template column order');
END;
/
|
||||
|
||||
PROMPT ========================================================================
PROMPT Exporting CSDB.LEGACY_ISSUER_RAT_FULL - HIST Only
PROMPT ========================================================================
PROMPT All data to HIST bucket (Parquet with partitioning)
PROMPT Column mapping: A_ETL_LOAD_SET_FK to A_WORKFLOW_HISTORY_KEY
PROMPT ========================================================================

-- NEW v2.4.0: Per-column date format handling via pTemplateTableName
-- HIST-only full export of LEGACY_ISSUER_RAT_FULL (same pattern as the
-- blocks above).
BEGIN
    DBMS_OUTPUT.PUT_LINE('Exporting LEGACY_ISSUER_RAT_FULL data to HIST bucket...');
    DBMS_OUTPUT.PUT_LINE('Using Template Table: CT_ET_TEMPLATES.CSDB_ISSUER_RAT_FULL');

    CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
        pSchemaName => 'OU_CSDB',
        pTableName => 'LEGACY_ISSUER_RAT_FULL',
        pKeyColumnName => 'A_ETL_LOAD_SET_FK',
        pBucketArea => 'ARCHIVE',
        pFolderName => 'ARCHIVE/CSDB/CSDB_ISSUER_RAT_FULL',
        pMaxDate => SYSDATE,
        pParallelDegree => 8,
        pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_ISSUER_RAT_FULL'
    );

    DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_ISSUER_RAT_FULL exported to HIST bucket with template column order');
END;
/
|
||||
|
||||
PROMPT ========================================================================
PROMPT Exporting CSDB.LEGACY_ISSUER_DESC_FULL - HIST Only
PROMPT ========================================================================
PROMPT All data to HIST bucket (Parquet with partitioning)
PROMPT Column mapping: A_ETL_LOAD_SET_FK to A_WORKFLOW_HISTORY_KEY
PROMPT ========================================================================

-- NEW v2.4.0: Per-column date format handling via pTemplateTableName
-- HIST-only full export of LEGACY_ISSUER_DESC_FULL (same pattern as the
-- blocks above).
BEGIN
    DBMS_OUTPUT.PUT_LINE('Exporting LEGACY_ISSUER_DESC_FULL data to HIST bucket...');
    DBMS_OUTPUT.PUT_LINE('Using Template Table: CT_ET_TEMPLATES.CSDB_ISSUER_DESC_FULL');

    CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
        pSchemaName => 'OU_CSDB',
        pTableName => 'LEGACY_ISSUER_DESC_FULL',
        pKeyColumnName => 'A_ETL_LOAD_SET_FK',
        pBucketArea => 'ARCHIVE',
        pFolderName => 'ARCHIVE/CSDB/CSDB_ISSUER_DESC_FULL',
        pMaxDate => SYSDATE,
        pParallelDegree => 8,
        pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_ISSUER_DESC_FULL'
    );

    DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_ISSUER_DESC_FULL exported to HIST bucket with template column order');
END;
/
|
||||
|
||||
-- Final status banner for Export Group 2.
PROMPT ========================================================================
PROMPT Group 2 Export Completed
PROMPT ========================================================================
PROMPT - LEGACY_INSTR_RAT_FULL: HIST exported
PROMPT - LEGACY_INSTR_DESC_FULL: HIST exported
PROMPT - LEGACY_ISSUER_RAT_FULL: HIST exported
PROMPT - LEGACY_ISSUER_DESC_FULL: HIST exported
PROMPT ========================================================================

--=============================================================================================================================
-- End of Script
--=============================================================================================================================
|
||||
@@ -0,0 +1,193 @@
|
||||
-- =====================================================================================
|
||||
-- Script: 03_MARS_835_verify_exports.sql
|
||||
-- Purpose: Verify exported files exist in DATA and HIST buckets after export
|
||||
-- Author: Grzegorz Michalski
|
||||
-- Created: 2025-12-17
|
||||
-- MARS Issue: MARS-835
|
||||
-- Target Locations: mrds_data_dev/ODS/CSDB/, mrds_hist_dev/ARCHIVE/CSDB/
|
||||
-- =====================================================================================
|
||||
|
||||
SET SERVEROUTPUT ON SIZE UNLIMITED;
|
||||
SET FEEDBACK ON;
|
||||
SET VERIFY OFF;
|
||||
SET LINESIZE 200;
|
||||
|
||||
PROMPT =====================================================================================
|
||||
PROMPT MARS-835 Verification: Listing exported files in DATA and HIST buckets
|
||||
PROMPT =====================================================================================
|
||||
|
||||
-- Post-export verification: lists the files each export produced in the DATA
-- (CSV) and HIST (Parquet) buckets via DBMS_CLOUD.LIST_OBJECTS and prints an
-- overall SUCCESS / FAILED / PARTIAL status. Read-only; nothing is modified.
DECLARE
    vDataBucketUri   VARCHAR2(500);   -- base URI of the DATA bucket
    vHistBucketUri   VARCHAR2(500);   -- base URI of the HIST (ARCHIVE) bucket
    vCredentialName  VARCHAR2(100);   -- DBMS_CLOUD credential used for listing
    vFileCount       NUMBER := 0;     -- files found in the folder currently being checked
    vTotalDataFiles  NUMBER := 0;     -- running total of CSV files across all DATA folders
    vTotalHistFiles  NUMBER := 0;     -- running total of Parquet files (capped at 10 per folder)
    vTotalDataSize   NUMBER := 0;     -- bytes, summed over all listed DATA files
    vTotalHistSize   NUMBER := 0;     -- bytes, first 10 files per HIST folder only (sample)

    -- One bucket folder to verify: its path, the table it belongs to, and the
    -- file format the export should have produced there.
    TYPE t_folder_info IS RECORD (
        folder_name     VARCHAR2(200),
        table_name      VARCHAR2(100),
        expected_format VARCHAR2(20)
    );
    TYPE t_folder_list IS TABLE OF t_folder_info;

    vDataFolders t_folder_list;
    vHistFolders t_folder_list;
BEGIN
    -- Get bucket URIs and credential from FILE_MANAGER
    vDataBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('DATA');
    vHistBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
    vCredentialName := CT_MRDS.ENV_MANAGER.gvCredentialName;

    DBMS_OUTPUT.PUT_LINE('VERIFICATION TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
    DBMS_OUTPUT.PUT_LINE('DATA Bucket URI: ' || vDataBucketUri);
    DBMS_OUTPUT.PUT_LINE('HIST Bucket URI: ' || vHistBucketUri);
    DBMS_OUTPUT.PUT_LINE('');

    -- Initialize folder lists: only DEBT/DEBT_DAILY are split exports (DATA bucket),
    -- all six CSDB tables have a HIST export.
    vDataFolders := t_folder_list(
        t_folder_info('ODS/CSDB/CSDB_DEBT/', 'DEBT', 'CSV'),
        t_folder_info('ODS/CSDB/CSDB_DEBT_DAILY/', 'DEBT_DAILY', 'CSV')
    );

    vHistFolders := t_folder_list(
        t_folder_info('ARCHIVE/CSDB/CSDB_DEBT/', 'DEBT', 'Parquet'),
        t_folder_info('ARCHIVE/CSDB/CSDB_DEBT_DAILY/', 'DEBT_DAILY', 'Parquet'),
        t_folder_info('ARCHIVE/CSDB/CSDB_INSTR_RAT_FULL/', 'INSTR_RAT_FULL', 'Parquet'),
        t_folder_info('ARCHIVE/CSDB/CSDB_INSTR_DESC_FULL/', 'INSTR_DESC_FULL', 'Parquet'),
        t_folder_info('ARCHIVE/CSDB/CSDB_ISSUER_RAT_FULL/', 'ISSUER_RAT_FULL', 'Parquet'),
        t_folder_info('ARCHIVE/CSDB/CSDB_ISSUER_DESC_FULL/', 'ISSUER_DESC_FULL', 'Parquet')
    );

    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    DBMS_OUTPUT.PUT_LINE('Checking DATA Bucket Exports (CSV format - last 6 months)');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');

    -- Check DATA bucket exports: list every *.csv file per folder (no cap).
    FOR i IN 1..vDataFolders.COUNT LOOP
        vFileCount := 0;

        DBMS_OUTPUT.PUT_LINE('');
        DBMS_OUTPUT.PUT_LINE('Table: ' || vDataFolders(i).table_name || ' (' || vDataFolders(i).expected_format || ')');
        DBMS_OUTPUT.PUT_LINE('Folder: ' || vDataFolders(i).folder_name);
        DBMS_OUTPUT.PUT_LINE('-------------------------------------------------------------------------------------');

        BEGIN
            FOR rec IN (
                SELECT object_name, bytes, TO_CHAR(created, 'YYYY-MM-DD HH24:MI:SS') AS created_date
                FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
                    credential_name => vCredentialName,
                    location_uri    => vDataBucketUri || vDataFolders(i).folder_name
                ))
                WHERE object_name LIKE '%.csv'
                ORDER BY created DESC
            ) LOOP
                vFileCount := vFileCount + 1;
                vTotalDataFiles := vTotalDataFiles + 1;
                vTotalDataSize := vTotalDataSize + rec.bytes;
                DBMS_OUTPUT.PUT_LINE('  [' || vFileCount || '] ' || rec.object_name ||
                                     ' (' || ROUND(rec.bytes/1024/1024, 2) || ' MB) - ' || rec.created_date);
            END LOOP;

            IF vFileCount = 0 THEN
                DBMS_OUTPUT.PUT_LINE('  [ERROR] No CSV files found - Export may have failed!');
            ELSE
                DBMS_OUTPUT.PUT_LINE('  [SUCCESS] Found ' || vFileCount || ' CSV file(s)');
            END IF;
        EXCEPTION
            -- Listing a missing/forbidden folder raises; report and continue
            -- with the remaining folders instead of aborting the whole check.
            WHEN OTHERS THEN
                DBMS_OUTPUT.PUT_LINE('  [ERROR] Cannot access folder - ' || SQLERRM);
        END;
    END LOOP;

    DBMS_OUTPUT.PUT_LINE('');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    DBMS_OUTPUT.PUT_LINE('Checking HIST Bucket Exports (Parquet with Hive partitioning)');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');

    -- Check HIST bucket exports: sample at most 10 newest *.parquet files per
    -- folder (HIST folders can hold thousands of partition files) and display
    -- only the first 5 of the sample.
    FOR i IN 1..vHistFolders.COUNT LOOP
        vFileCount := 0;

        DBMS_OUTPUT.PUT_LINE('');
        DBMS_OUTPUT.PUT_LINE('Table: ' || vHistFolders(i).table_name || ' (' || vHistFolders(i).expected_format || ')');
        DBMS_OUTPUT.PUT_LINE('Folder: ' || vHistFolders(i).folder_name);
        DBMS_OUTPUT.PUT_LINE('-------------------------------------------------------------------------------------');

        BEGIN
            FOR rec IN (
                SELECT object_name, bytes, TO_CHAR(created, 'YYYY-MM-DD HH24:MI:SS') AS created_date
                FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
                    credential_name => vCredentialName,
                    location_uri    => vHistBucketUri || vHistFolders(i).folder_name
                ))
                WHERE object_name LIKE '%.parquet'
                ORDER BY created DESC
                FETCH FIRST 10 ROWS ONLY
            ) LOOP
                vFileCount := vFileCount + 1;
                vTotalHistFiles := vTotalHistFiles + 1;
                vTotalHistSize := vTotalHistSize + rec.bytes;

                -- Only print the 5 newest; the rest of the sample is counted silently.
                IF vFileCount <= 5 THEN
                    DBMS_OUTPUT.PUT_LINE('  [' || vFileCount || '] ' || rec.object_name ||
                                         ' (' || ROUND(rec.bytes/1024/1024, 2) || ' MB) - ' || rec.created_date);
                END IF;
            END LOOP;

            IF vFileCount = 0 THEN
                DBMS_OUTPUT.PUT_LINE('  [ERROR] No Parquet files found - Export may have failed!');
            ELSIF vFileCount > 5 THEN
                DBMS_OUTPUT.PUT_LINE('  [SUCCESS] Found ' || vFileCount || '+ Parquet files (showing first 5)');
                DBMS_OUTPUT.PUT_LINE('  [INFO] Check Hive partitioning: PARTITION_YEAR=YYYY/PARTITION_MONTH=MM/');
            ELSE
                DBMS_OUTPUT.PUT_LINE('  [SUCCESS] Found ' || vFileCount || ' Parquet file(s)');
            END IF;
        EXCEPTION
            WHEN OTHERS THEN
                DBMS_OUTPUT.PUT_LINE('  [ERROR] Cannot access folder - ' || SQLERRM);
        END;
    END LOOP;

    DBMS_OUTPUT.PUT_LINE('');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    DBMS_OUTPUT.PUT_LINE('Export Verification Summary');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    DBMS_OUTPUT.PUT_LINE('DATA Bucket (CSV):');
    DBMS_OUTPUT.PUT_LINE('  - Total files: ' || vTotalDataFiles);
    DBMS_OUTPUT.PUT_LINE('  - Total size: ' || ROUND(vTotalDataSize/1024/1024/1024, 2) || ' GB');
    DBMS_OUTPUT.PUT_LINE('  - Expected tables: 2 (DEBT, DEBT_DAILY - last 6 months)');
    DBMS_OUTPUT.PUT_LINE('');
    DBMS_OUTPUT.PUT_LINE('HIST Bucket (Parquet):');
    -- '+' suffixes below reflect the 10-file-per-folder sampling cap above.
    DBMS_OUTPUT.PUT_LINE('  - Total files: ' || vTotalHistFiles || '+');
    DBMS_OUTPUT.PUT_LINE('  - Total size: ' || ROUND(vTotalHistSize/1024/1024/1024, 2) || '+ GB (sample)');
    DBMS_OUTPUT.PUT_LINE('  - Expected tables: 6 (all CSDB tables with historical data)');
    DBMS_OUTPUT.PUT_LINE('');

    -- Heuristic overall status: at least one file per expected folder.
    IF vTotalDataFiles >= 2 AND vTotalHistFiles >= 6 THEN
        DBMS_OUTPUT.PUT_LINE('[SUCCESS] OVERALL STATUS: Export appears SUCCESSFUL');
        DBMS_OUTPUT.PUT_LINE('          Files found in both DATA and HIST buckets');
        DBMS_OUTPUT.PUT_LINE('          Proceed to record count verification (Step 4)');
    ELSIF vTotalDataFiles = 0 AND vTotalHistFiles = 0 THEN
        DBMS_OUTPUT.PUT_LINE('[FAILED] OVERALL STATUS: Export FAILED');
        DBMS_OUTPUT.PUT_LINE('          No files found in either bucket');
        DBMS_OUTPUT.PUT_LINE('          Review export logs for errors');
    ELSE
        DBMS_OUTPUT.PUT_LINE('[WARNING] OVERALL STATUS: Partial export detected');
        DBMS_OUTPUT.PUT_LINE('          Some files missing - review export logs');
    END IF;

    DBMS_OUTPUT.PUT_LINE('=====================================================================================');

EXCEPTION
    -- Unexpected failure outside the per-folder handlers: report and re-raise
    -- so the calling script sees a hard error.
    WHEN OTHERS THEN
        DBMS_OUTPUT.PUT_LINE('ERROR during verification: ' || SQLERRM);
        RAISE;
END;
/
|
||||
|
||||
PROMPT
|
||||
PROMPT Export verification completed
|
||||
PROMPT
|
||||
@@ -0,0 +1,226 @@
|
||||
-- =====================================================================================
|
||||
-- Script: 04_MARS_835_verify_record_counts.sql
|
||||
-- Purpose: Verify record counts match between source tables and exported data
|
||||
-- Author: Grzegorz Michalski
|
||||
-- Created: 2025-12-17
|
||||
-- MARS Issue: MARS-835
|
||||
-- Verification: Compare OU_CSDB source tables with ODS external tables
|
||||
-- =====================================================================================
|
||||
|
||||
SET SERVEROUTPUT ON SIZE UNLIMITED;
|
||||
SET FEEDBACK OFF;
|
||||
SET VERIFY OFF;
|
||||
SET LINESIZE 200;
|
||||
|
||||
PROMPT =====================================================================================
|
||||
PROMPT MARS-835 Record Count Verification
|
||||
PROMPT =====================================================================================
|
||||
PROMPT Comparing source table counts with exported external table counts
|
||||
PROMPT =====================================================================================
|
||||
|
||||
-- Record-count verification: compares each OU_CSDB source table with its
-- exported external table(s) and prints a per-table PASS/MISMATCH/ERROR line
-- plus a final summary. Read-only.
--
-- BUG FIX vs. previous version: the status logic now tests the -1 error
-- sentinel BEFORE the count comparison. Previously a failed DATA count (-1)
-- combined with an inflated HIST count could sum to the source count and
-- report a false PASS.
DECLARE
    -- One table's export configuration: split exports (DEBT, DEBT_DAILY) have
    -- both a DATA and a HIST external table; HIST-only tables have data_external_table = NULL.
    TYPE t_table_info IS RECORD (
        source_schema       VARCHAR2(50),
        source_table        VARCHAR2(100),
        data_external_table VARCHAR2(100),
        hist_external_table VARCHAR2(100),
        has_data_export     BOOLEAN,
        has_hist_export     BOOLEAN
    );
    TYPE t_table_list IS TABLE OF t_table_info;

    vTables           t_table_list;
    vSourceCount      NUMBER;          -- -1 = source table inaccessible
    vDataCount        NUMBER;          -- NULL = N/A (no DATA export), -1 = error
    vHistCount        NUMBER;          -- -1 = error
    vTotalSourceCount NUMBER := 0;
    vTotalDataCount   NUMBER := 0;
    vTotalHistCount   NUMBER := 0;
    vMismatchCount    NUMBER := 0;     -- tables with MISMATCH or ERROR status
    vSql              VARCHAR2(4000);

    -- Counts rows in an exported external table.
    -- If the query fails and either the source table was empty (no files were
    -- exported, so the external table has nothing to read) or the error is one
    -- of the "no files in bucket" errors, returns 0:
    --   ORA-29913: error in executing ODCIEXTTABLEOPEN callout
    --   ORA-29400: data cartridge error
    --   KUP-13023: nothing matched wildcard query (no files in bucket)
    -- Any other error returns -1 (e.g. ORA-30653 reject limit, which is a real
    -- data quality error and must NOT be treated as empty).
    FUNCTION external_table_count(
        pTableName   IN VARCHAR2,
        pSourceCount IN NUMBER
    ) RETURN NUMBER IS
        vCount NUMBER;
    BEGIN
        EXECUTE IMMEDIATE 'SELECT COUNT(*) FROM ' || pTableName INTO vCount;
        RETURN vCount;
    EXCEPTION
        WHEN OTHERS THEN
            IF pSourceCount = 0 OR SQLCODE IN (-29913, -29400) OR SQLERRM LIKE '%KUP-13023%' THEN
                RETURN 0;
            ELSE
                RETURN -1;
            END IF;
    END external_table_count;
BEGIN
    DBMS_OUTPUT.PUT_LINE('VERIFICATION TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS'));
    DBMS_OUTPUT.PUT_LINE('');

    -- Initialize table list with export configuration
    vTables := t_table_list(
        t_table_info('OU_CSDB', 'LEGACY_DEBT', 'ODS.CSDB_DEBT_ODS', 'ODS.CSDB_DEBT_ARCHIVE', TRUE, TRUE),
        t_table_info('OU_CSDB', 'LEGACY_DEBT_DAILY', 'ODS.CSDB_DEBT_DAILY_ODS', 'ODS.CSDB_DEBT_DAILY_ARCHIVE', TRUE, TRUE),
        t_table_info('OU_CSDB', 'LEGACY_INSTR_RAT_FULL', NULL, 'ODS.CSDB_INSTR_RAT_FULL_ARCHIVE', FALSE, TRUE),
        t_table_info('OU_CSDB', 'LEGACY_INSTR_DESC_FULL', NULL, 'ODS.CSDB_INSTR_DESC_FULL_ARCHIVE', FALSE, TRUE),
        t_table_info('OU_CSDB', 'LEGACY_ISSUER_RAT_FULL', NULL, 'ODS.CSDB_ISSUER_RAT_FULL_ARCHIVE', FALSE, TRUE),
        t_table_info('OU_CSDB', 'LEGACY_ISSUER_DESC_FULL', NULL, 'ODS.CSDB_ISSUER_DESC_FULL_ARCHIVE', FALSE, TRUE)
    );

    DBMS_OUTPUT.PUT_LINE('-----------------------------------------------------------------------------------------');
    DBMS_OUTPUT.PUT_LINE('Table Name                 Source Count     DATA Count     HIST Count   Status');
    DBMS_OUTPUT.PUT_LINE('-----------------------------------------------------------------------------------------');

    FOR i IN 1..vTables.COUNT LOOP
        -- Get source table count; skip this table entirely if inaccessible.
        vSql := 'SELECT COUNT(*) FROM ' || vTables(i).source_schema || '.' || vTables(i).source_table;

        BEGIN
            EXECUTE IMMEDIATE vSql INTO vSourceCount;
            vTotalSourceCount := vTotalSourceCount + vSourceCount;
        EXCEPTION
            WHEN OTHERS THEN
                vSourceCount := -1;
                DBMS_OUTPUT.PUT_LINE(RPAD(vTables(i).source_table, 24) || 'ERROR: Cannot access source table');
                CONTINUE;
        END;

        -- Get DATA external table count (if applicable)
        IF vTables(i).has_data_export THEN
            vDataCount := external_table_count(vTables(i).data_external_table, vSourceCount);
            IF vDataCount >= 0 THEN
                vTotalDataCount := vTotalDataCount + vDataCount;
            END IF;
        ELSE
            vDataCount := NULL;
        END IF;

        -- Get HIST external table count
        vHistCount := external_table_count(vTables(i).hist_external_table, vSourceCount);
        IF vHistCount >= 0 THEN
            vTotalHistCount := vTotalHistCount + vHistCount;
        END IF;

        -- Display results
        DECLARE
            vStatus      VARCHAR2(20);
            vDataDisplay VARCHAR2(17);
            vHistDisplay VARCHAR2(17);
        BEGIN
            -- Format DATA count display
            IF vDataCount IS NULL THEN
                vDataDisplay := 'N/A';
            ELSIF vDataCount = -1 THEN
                vDataDisplay := 'ERROR';
            ELSE
                vDataDisplay := TO_CHAR(vDataCount, '9,999,999,999');
            END IF;

            -- Format HIST count display
            IF vHistCount = -1 THEN
                vHistDisplay := 'ERROR';
            ELSE
                vHistDisplay := TO_CHAR(vHistCount, '9,999,999,999');
            END IF;

            -- Determine status. The -1 error sentinel is checked FIRST so a
            -- count failure can never be mistaken for a PASS.
            IF vTables(i).has_data_export THEN
                -- Split export: DATA + HIST must equal SOURCE
                IF vDataCount = -1 OR vHistCount = -1 THEN
                    vStatus := 'ERROR';
                    vMismatchCount := vMismatchCount + 1;
                ELSIF (vDataCount + vHistCount) = vSourceCount THEN
                    vStatus := 'PASS';
                ELSE
                    vStatus := 'MISMATCH';
                    vMismatchCount := vMismatchCount + 1;
                END IF;
            ELSE
                -- HIST only: HIST must equal SOURCE
                IF vHistCount = -1 THEN
                    vStatus := 'ERROR';
                    vMismatchCount := vMismatchCount + 1;
                ELSIF vHistCount = vSourceCount THEN
                    vStatus := 'PASS';
                ELSE
                    vStatus := 'MISMATCH';
                    vMismatchCount := vMismatchCount + 1;
                END IF;
            END IF;

            DBMS_OUTPUT.PUT_LINE(
                RPAD(vTables(i).source_table, 24) ||
                LPAD(TO_CHAR(vSourceCount, '9,999,999,999'), 15) ||
                LPAD(vDataDisplay, 15) ||
                LPAD(vHistDisplay, 15) || '   ' ||
                vStatus
            );
        END;
    END LOOP;

    DBMS_OUTPUT.PUT_LINE('-----------------------------------------------------------------------------------------');
    DBMS_OUTPUT.PUT_LINE(
        RPAD('TOTALS', 24) ||
        LPAD(TO_CHAR(vTotalSourceCount, '9,999,999,999'), 15) ||
        LPAD(TO_CHAR(vTotalDataCount, '9,999,999,999'), 15) ||
        LPAD(TO_CHAR(vTotalHistCount, '9,999,999,999'), 15)
    );
    DBMS_OUTPUT.PUT_LINE('-----------------------------------------------------------------------------------------');
    DBMS_OUTPUT.PUT_LINE('');

    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    DBMS_OUTPUT.PUT_LINE('Record Count Verification Summary');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    DBMS_OUTPUT.PUT_LINE('Total source records: ' || TO_CHAR(vTotalSourceCount, '9,999,999,999'));
    DBMS_OUTPUT.PUT_LINE('Total DATA records:   ' || TO_CHAR(vTotalDataCount, '9,999,999,999') || ' (last 6 months)');
    DBMS_OUTPUT.PUT_LINE('Total HIST records:   ' || TO_CHAR(vTotalHistCount, '9,999,999,999') || ' (historical + full exports)');
    DBMS_OUTPUT.PUT_LINE('');

    IF vMismatchCount = 0 THEN
        DBMS_OUTPUT.PUT_LINE('[PASS] VERIFICATION PASSED');
        DBMS_OUTPUT.PUT_LINE('       All record counts match between source and exported data');
        DBMS_OUTPUT.PUT_LINE('       Export completed successfully');
    ELSE
        DBMS_OUTPUT.PUT_LINE('[INFO] VERIFICATION COMPLETED WITH MISMATCHES');
        DBMS_OUTPUT.PUT_LINE('       Found ' || vMismatchCount || ' table(s) with count mismatches');
        DBMS_OUTPUT.PUT_LINE('       NOTE: Mismatches may be caused by pre-existing files in buckets (see pre-check)');
        DBMS_OUTPUT.PUT_LINE('       Review export logs and pre-check results before re-running exports');
    END IF;

    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    DBMS_OUTPUT.PUT_LINE('');
    DBMS_OUTPUT.PUT_LINE('Legend:');
    DBMS_OUTPUT.PUT_LINE('  PASS     - Record counts match (export successful)');
    DBMS_OUTPUT.PUT_LINE('  MISMATCH - Record counts differ (may be pre-existing files or export issue)');
    DBMS_OUTPUT.PUT_LINE('             Check pre-check results to identify pre-existing files');
    DBMS_OUTPUT.PUT_LINE('  ERROR    - Cannot access table (may not exist yet)');
    DBMS_OUTPUT.PUT_LINE('  N/A      - Not applicable (table not exported to DATA)');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');

EXCEPTION
    -- Unexpected failure outside the per-table handlers: report and re-raise.
    WHEN OTHERS THEN
        DBMS_OUTPUT.PUT_LINE('ERROR during record count verification: ' || SQLERRM);
        RAISE;
END;
/
|
||||
|
||||
SET FEEDBACK ON;
|
||||
|
||||
PROMPT
|
||||
PROMPT Record count verification completed
|
||||
PROMPT
|
||||
@@ -0,0 +1,181 @@
|
||||
--=============================================================================================================================
|
||||
-- MARS-835 ROLLBACK: Delete Group 1 Exported Files (DEBT, DEBT_DAILY)
|
||||
--=============================================================================================================================
|
||||
-- Purpose: Delete exported CSV and Parquet files from DATA and HIST buckets
|
||||
-- WARNING: This will permanently delete exported data files!
|
||||
-- Author: Grzegorz Michalski
|
||||
-- Date: 2025-12-17
|
||||
-- Related: MARS-835 - CSDB Data Export Rollback
|
||||
--=============================================================================================================================
|
||||
|
||||
SET SERVEROUTPUT ON SIZE UNLIMITED
|
||||
|
||||
PROMPT ========================================================================
|
||||
PROMPT ROLLBACK: Deleting DEBT exported files
|
||||
PROMPT ========================================================================
|
||||
PROMPT WARNING: This will delete files from:
|
||||
PROMPT - DATA bucket: mrds_data_dev/ODS/CSDB/CSDB_DEBT/
|
||||
PROMPT - HIST bucket: mrds_hist_dev/ARCHIVE/CSDB/CSDB_DEBT/
|
||||
PROMPT ========================================================================
|
||||
|
||||
-- Rollback for the DEBT export: removes the CSV files from the DATA bucket
-- and the Parquet files from the HIST bucket. Only objects matching the exact
-- export naming pattern are deleted, so unrelated bucket content is preserved.
DECLARE
    vDataUri    VARCHAR2(500);
    vHistUri    VARCHAR2(500);
    vCredential VARCHAR2(100);

    -- Deletes every object under pLocationUri whose name passes both LIKE
    -- filters and the regular expression. Objects already gone (ORA-20404
    -- from DBMS_CLOUD) are reported and skipped; any other error propagates.
    PROCEDURE purge_objects(
        pLocationUri IN VARCHAR2,
        pLike1       IN VARCHAR2,
        pLike2       IN VARCHAR2,
        pNamePattern IN VARCHAR2
    ) IS
    BEGIN
        FOR obj IN (
            SELECT object_name
              FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
                       credential_name => vCredential,
                       location_uri    => pLocationUri))
             WHERE object_name LIKE pLike1
               AND object_name LIKE pLike2
               AND REGEXP_LIKE(object_name, pNamePattern)
        ) LOOP
            BEGIN
                DBMS_CLOUD.DELETE_OBJECT(
                    credential_name => vCredential,
                    object_uri      => pLocationUri || obj.object_name
                );
                DBMS_OUTPUT.PUT_LINE('  Deleted: ' || obj.object_name);
            EXCEPTION
                WHEN OTHERS THEN
                    IF SQLCODE = -20404 THEN
                        DBMS_OUTPUT.PUT_LINE('  Skipped (not found): ' || obj.object_name);
                    ELSE
                        RAISE;
                    END IF;
            END;
        END LOOP;
    END purge_objects;
BEGIN
    -- Resolve bucket URIs and the DBMS_CLOUD credential.
    vDataUri    := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('DATA');
    vHistUri    := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
    vCredential := CT_MRDS.ENV_MANAGER.gvCredentialName;

    -- CSV exports: LEGACY_DEBT_YYYYMM.csv or LEGACY_DEBT_YYYYMM_1_<ts>Z.csv
    -- (Oracle appends a timestamp suffix on multi-part exports).
    DBMS_OUTPUT.PUT_LINE('Deleting DEBT files from DATA bucket...');
    purge_objects(
        pLocationUri => vDataUri || 'ODS/CSDB/CSDB_DEBT/',
        pLike1       => 'LEGACY_DEBT_%',
        pLike2       => '%.csv',
        pNamePattern => '^LEGACY_DEBT_[0-9]{6}(_[0-9]+_[0-9]{8}T[0-9]{6,}Z)?\.csv$'
    );

    -- Parquet exports under Hive-style PARTITION_YEAR=/PARTITION_MONTH= folders.
    DBMS_OUTPUT.PUT_LINE('Deleting DEBT files from HIST bucket...');
    purge_objects(
        pLocationUri => vHistUri || 'ARCHIVE/CSDB/CSDB_DEBT/',
        pLike1       => '%PARTITION_YEAR=%',
        pLike2       => '%.parquet',
        pNamePattern => '[0-9]{6}(_[0-9]+_[0-9]{8}T[0-9]{6,}Z)?\.parquet$'
    );

    DBMS_OUTPUT.PUT_LINE('SUCCESS: DEBT files deleted');
END;
/
|
||||
|
||||
PROMPT ========================================================================
|
||||
PROMPT ROLLBACK: Deleting DEBT_DAILY exported files
|
||||
PROMPT ========================================================================
|
||||
PROMPT WARNING: This will delete files from:
|
||||
PROMPT - DATA bucket: mrds_data_dev/ODS/CSDB/CSDB_DEBT_DAILY/
|
||||
PROMPT - HIST bucket: mrds_hist_dev/ARCHIVE/CSDB/CSDB_DEBT_DAILY/
|
||||
PROMPT ========================================================================
|
||||
|
||||
-- Rollback for the DEBT_DAILY export: removes the CSV files from the DATA
-- bucket and the Parquet files from the HIST bucket. Only objects matching
-- the exact export naming pattern are deleted.
DECLARE
    vDataUri    VARCHAR2(500);
    vHistUri    VARCHAR2(500);
    vCredential VARCHAR2(100);

    -- Deletes every object under pLocationUri whose name passes both LIKE
    -- filters and the regular expression. Objects already gone (ORA-20404
    -- from DBMS_CLOUD) are reported and skipped; any other error propagates.
    PROCEDURE purge_objects(
        pLocationUri IN VARCHAR2,
        pLike1       IN VARCHAR2,
        pLike2       IN VARCHAR2,
        pNamePattern IN VARCHAR2
    ) IS
    BEGIN
        FOR obj IN (
            SELECT object_name
              FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
                       credential_name => vCredential,
                       location_uri    => pLocationUri))
             WHERE object_name LIKE pLike1
               AND object_name LIKE pLike2
               AND REGEXP_LIKE(object_name, pNamePattern)
        ) LOOP
            BEGIN
                DBMS_CLOUD.DELETE_OBJECT(
                    credential_name => vCredential,
                    object_uri      => pLocationUri || obj.object_name
                );
                DBMS_OUTPUT.PUT_LINE('  Deleted: ' || obj.object_name);
            EXCEPTION
                WHEN OTHERS THEN
                    IF SQLCODE = -20404 THEN
                        DBMS_OUTPUT.PUT_LINE('  Skipped (not found): ' || obj.object_name);
                    ELSE
                        RAISE;
                    END IF;
            END;
        END LOOP;
    END purge_objects;
BEGIN
    -- Resolve bucket URIs and the DBMS_CLOUD credential.
    vDataUri    := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('DATA');
    vHistUri    := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
    vCredential := CT_MRDS.ENV_MANAGER.gvCredentialName;

    -- CSV exports: LEGACY_DEBT_DAILY_YYYYMM.csv or LEGACY_DEBT_DAILY_YYYYMM_1_<ts>Z.csv
    DBMS_OUTPUT.PUT_LINE('Deleting DEBT_DAILY files from DATA bucket...');
    purge_objects(
        pLocationUri => vDataUri || 'ODS/CSDB/CSDB_DEBT_DAILY/',
        pLike1       => 'LEGACY_DEBT_DAILY_%',
        pLike2       => '%.csv',
        pNamePattern => '^LEGACY_DEBT_DAILY_[0-9]{6}(_[0-9]+_[0-9]{8}T[0-9]{6,}Z)?\.csv$'
    );

    -- Parquet exports under Hive-style PARTITION_YEAR=/PARTITION_MONTH= folders.
    DBMS_OUTPUT.PUT_LINE('Deleting DEBT_DAILY files from HIST bucket...');
    purge_objects(
        pLocationUri => vHistUri || 'ARCHIVE/CSDB/CSDB_DEBT_DAILY/',
        pLike1       => '%PARTITION_YEAR=%',
        pLike2       => '%.parquet',
        pNamePattern => '[0-9]{6}(_[0-9]+_[0-9]{8}T[0-9]{6,}Z)?\.parquet$'
    );

    DBMS_OUTPUT.PUT_LINE('SUCCESS: DEBT_DAILY files deleted');
END;
/
|
||||
|
||||
PROMPT SUCCESS: Group 1 rollback completed
|
||||
|
||||
--=============================================================================================================================
|
||||
-- End of Script
|
||||
--=============================================================================================================================
|
||||
@@ -0,0 +1,193 @@
|
||||
--=============================================================================================================================
|
||||
-- MARS-835 ROLLBACK: Delete Group 2 Exported Files (4 HIST-only tables)
|
||||
--=============================================================================================================================
|
||||
-- Purpose: Delete exported Parquet files from HIST bucket
|
||||
-- WARNING: This will permanently delete exported data files!
|
||||
-- Author: Grzegorz Michalski
|
||||
-- Date: 2025-12-17
|
||||
-- Related: MARS-835 - CSDB Data Export Rollback
|
||||
--=============================================================================================================================
|
||||
|
||||
SET SERVEROUTPUT ON SIZE UNLIMITED
|
||||
|
||||
PROMPT ========================================================================
|
||||
PROMPT ROLLBACK: Deleting INSTR_RAT_FULL exported files
|
||||
PROMPT ========================================================================
|
||||
|
||||
-- Rollback for the INSTR_RAT_FULL export: removes the Parquet files from the
-- HIST bucket. Only objects matching the export naming pattern are deleted.
DECLARE
    vArchiveUri VARCHAR2(500);
    vCredential VARCHAR2(100);

    -- Deletes every Parquet export file under pLocationUri. Matches only
    -- Hive-partitioned objects whose names end in the YYYYMM (optionally
    -- timestamp-suffixed) export pattern. Already-removed objects
    -- (ORA-20404) are reported and skipped; any other error propagates.
    PROCEDURE purge_parquet(pLocationUri IN VARCHAR2) IS
    BEGIN
        FOR obj IN (
            SELECT object_name
              FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
                       credential_name => vCredential,
                       location_uri    => pLocationUri))
             WHERE object_name LIKE '%PARTITION_YEAR=%'   -- Hive-style partitioning folders
               AND object_name LIKE '%.parquet'
               AND REGEXP_LIKE(object_name, '[0-9]{6}(_[0-9]+_[0-9]{8}T[0-9]{6,}Z)?\.parquet$')
        ) LOOP
            BEGIN
                DBMS_CLOUD.DELETE_OBJECT(
                    credential_name => vCredential,
                    object_uri      => pLocationUri || obj.object_name
                );
                DBMS_OUTPUT.PUT_LINE('  Deleted: ' || obj.object_name);
            EXCEPTION
                WHEN OTHERS THEN
                    IF SQLCODE = -20404 THEN
                        DBMS_OUTPUT.PUT_LINE('  Skipped (not found): ' || obj.object_name);
                    ELSE
                        RAISE;
                    END IF;
            END;
        END LOOP;
    END purge_parquet;
BEGIN
    -- Resolve the HIST bucket URI and the DBMS_CLOUD credential.
    vArchiveUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
    vCredential := CT_MRDS.ENV_MANAGER.gvCredentialName;

    DBMS_OUTPUT.PUT_LINE('Deleting INSTR_RAT_FULL files from HIST bucket...');
    purge_parquet(vArchiveUri || 'ARCHIVE/CSDB/CSDB_INSTR_RAT_FULL/');
    DBMS_OUTPUT.PUT_LINE('SUCCESS: INSTR_RAT_FULL files deleted');
END;
/
|
||||
|
||||
PROMPT ========================================================================
|
||||
PROMPT ROLLBACK: Deleting INSTR_DESC_FULL exported files
|
||||
PROMPT ========================================================================
|
||||
|
||||
-- Rollback for the INSTR_DESC_FULL export: removes the Parquet files from the
-- HIST bucket. Only objects matching the export naming pattern are deleted.
DECLARE
    vArchiveUri VARCHAR2(500);
    vCredential VARCHAR2(100);

    -- Deletes every Parquet export file under pLocationUri. Matches only
    -- Hive-partitioned objects whose names end in the YYYYMM (optionally
    -- timestamp-suffixed) export pattern. Already-removed objects
    -- (ORA-20404) are reported and skipped; any other error propagates.
    PROCEDURE purge_parquet(pLocationUri IN VARCHAR2) IS
    BEGIN
        FOR obj IN (
            SELECT object_name
              FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
                       credential_name => vCredential,
                       location_uri    => pLocationUri))
             WHERE object_name LIKE '%PARTITION_YEAR=%'   -- Hive-style partitioning folders
               AND object_name LIKE '%.parquet'
               AND REGEXP_LIKE(object_name, '[0-9]{6}(_[0-9]+_[0-9]{8}T[0-9]{6,}Z)?\.parquet$')
        ) LOOP
            BEGIN
                DBMS_CLOUD.DELETE_OBJECT(
                    credential_name => vCredential,
                    object_uri      => pLocationUri || obj.object_name
                );
                DBMS_OUTPUT.PUT_LINE('  Deleted: ' || obj.object_name);
            EXCEPTION
                WHEN OTHERS THEN
                    IF SQLCODE = -20404 THEN
                        DBMS_OUTPUT.PUT_LINE('  Skipped (not found): ' || obj.object_name);
                    ELSE
                        RAISE;
                    END IF;
            END;
        END LOOP;
    END purge_parquet;
BEGIN
    -- Resolve the HIST bucket URI and the DBMS_CLOUD credential.
    vArchiveUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
    vCredential := CT_MRDS.ENV_MANAGER.gvCredentialName;

    DBMS_OUTPUT.PUT_LINE('Deleting INSTR_DESC_FULL files from HIST bucket...');
    purge_parquet(vArchiveUri || 'ARCHIVE/CSDB/CSDB_INSTR_DESC_FULL/');
    DBMS_OUTPUT.PUT_LINE('SUCCESS: INSTR_DESC_FULL files deleted');
END;
/
|
||||
|
||||
PROMPT ========================================================================
|
||||
PROMPT ROLLBACK: Deleting ISSUER_RAT_FULL exported files
|
||||
PROMPT ========================================================================
|
||||
|
||||
-- Rollback for the ISSUER_RAT_FULL export: removes the Parquet files from the
-- HIST bucket. Only objects matching the export naming pattern are deleted.
DECLARE
    vArchiveUri VARCHAR2(500);
    vCredential VARCHAR2(100);

    -- Deletes every Parquet export file under pLocationUri. Matches only
    -- Hive-partitioned objects whose names end in the YYYYMM (optionally
    -- timestamp-suffixed) export pattern. Already-removed objects
    -- (ORA-20404) are reported and skipped; any other error propagates.
    PROCEDURE purge_parquet(pLocationUri IN VARCHAR2) IS
    BEGIN
        FOR obj IN (
            SELECT object_name
              FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
                       credential_name => vCredential,
                       location_uri    => pLocationUri))
             WHERE object_name LIKE '%PARTITION_YEAR=%'   -- Hive-style partitioning folders
               AND object_name LIKE '%.parquet'
               AND REGEXP_LIKE(object_name, '[0-9]{6}(_[0-9]+_[0-9]{8}T[0-9]{6,}Z)?\.parquet$')
        ) LOOP
            BEGIN
                DBMS_CLOUD.DELETE_OBJECT(
                    credential_name => vCredential,
                    object_uri      => pLocationUri || obj.object_name
                );
                DBMS_OUTPUT.PUT_LINE('  Deleted: ' || obj.object_name);
            EXCEPTION
                WHEN OTHERS THEN
                    IF SQLCODE = -20404 THEN
                        DBMS_OUTPUT.PUT_LINE('  Skipped (not found): ' || obj.object_name);
                    ELSE
                        RAISE;
                    END IF;
            END;
        END LOOP;
    END purge_parquet;
BEGIN
    -- Resolve the HIST bucket URI and the DBMS_CLOUD credential.
    vArchiveUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
    vCredential := CT_MRDS.ENV_MANAGER.gvCredentialName;

    DBMS_OUTPUT.PUT_LINE('Deleting ISSUER_RAT_FULL files from HIST bucket...');
    purge_parquet(vArchiveUri || 'ARCHIVE/CSDB/CSDB_ISSUER_RAT_FULL/');
    DBMS_OUTPUT.PUT_LINE('SUCCESS: ISSUER_RAT_FULL files deleted');
END;
/
|
||||
|
||||
PROMPT ========================================================================
|
||||
PROMPT ROLLBACK: Deleting ISSUER_DESC_FULL exported files
|
||||
PROMPT ========================================================================
|
||||
|
||||
-- Rollback for the ISSUER_DESC_FULL export: removes the Parquet files from the
-- HIST bucket. Only objects matching the export naming pattern are deleted.
DECLARE
    vArchiveUri VARCHAR2(500);
    vCredential VARCHAR2(100);

    -- Deletes every Parquet export file under pLocationUri. Matches only
    -- Hive-partitioned objects whose names end in the YYYYMM (optionally
    -- timestamp-suffixed) export pattern. Already-removed objects
    -- (ORA-20404) are reported and skipped; any other error propagates.
    PROCEDURE purge_parquet(pLocationUri IN VARCHAR2) IS
    BEGIN
        FOR obj IN (
            SELECT object_name
              FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
                       credential_name => vCredential,
                       location_uri    => pLocationUri))
             WHERE object_name LIKE '%PARTITION_YEAR=%'   -- Hive-style partitioning folders
               AND object_name LIKE '%.parquet'
               AND REGEXP_LIKE(object_name, '[0-9]{6}(_[0-9]+_[0-9]{8}T[0-9]{6,}Z)?\.parquet$')
        ) LOOP
            BEGIN
                DBMS_CLOUD.DELETE_OBJECT(
                    credential_name => vCredential,
                    object_uri      => pLocationUri || obj.object_name
                );
                DBMS_OUTPUT.PUT_LINE('  Deleted: ' || obj.object_name);
            EXCEPTION
                WHEN OTHERS THEN
                    IF SQLCODE = -20404 THEN
                        DBMS_OUTPUT.PUT_LINE('  Skipped (not found): ' || obj.object_name);
                    ELSE
                        RAISE;
                    END IF;
            END;
        END LOOP;
    END purge_parquet;
BEGIN
    -- Resolve the HIST bucket URI and the DBMS_CLOUD credential.
    vArchiveUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
    vCredential := CT_MRDS.ENV_MANAGER.gvCredentialName;

    DBMS_OUTPUT.PUT_LINE('Deleting ISSUER_DESC_FULL files from HIST bucket...');
    purge_parquet(vArchiveUri || 'ARCHIVE/CSDB/CSDB_ISSUER_DESC_FULL/');
    DBMS_OUTPUT.PUT_LINE('SUCCESS: ISSUER_DESC_FULL files deleted');
END;
/
|
||||
|
||||
PROMPT SUCCESS: Group 2 rollback completed
|
||||
|
||||
--=============================================================================================================================
|
||||
-- End of Script
|
||||
--=============================================================================================================================
|
||||
@@ -0,0 +1,176 @@
|
||||
-- =====================================================================================
-- Script: 99_MARS_835_verify_rollback.sql
-- Purpose: Verify all exported files have been deleted from DATA and HIST buckets
-- Author: Grzegorz Michalski
-- Created: 2025-12-17
-- MARS Issue: MARS-835
-- Verification: Confirm complete rollback (no CSDB files remaining)
-- =====================================================================================

SET SERVEROUTPUT ON SIZE UNLIMITED;
SET FEEDBACK ON;
SET VERIFY OFF;
SET LINESIZE 200;

PROMPT =====================================================================================
PROMPT MARS-835 Rollback Verification
PROMPT =====================================================================================
PROMPT Checking that all CSDB export files have been deleted
PROMPT =====================================================================================

DECLARE
    vDataBucketUri  VARCHAR2(500);
    vHistBucketUri  VARCHAR2(500);
    vCredentialName VARCHAR2(100);
    vDataFileCount  NUMBER := 0;   -- leftover CSV files found across DATA folders
    vHistFileCount  NUMBER := 0;   -- leftover Parquet files found across HIST folders
    vTotalFiles     NUMBER := 0;

    TYPE t_folder_list IS TABLE OF VARCHAR2(200);
    vDataFolders t_folder_list;
    vHistFolders t_folder_list;

    ---------------------------------------------------------------------------
    -- check_folder: list objects in one bucket folder and report any leftovers.
    -- Shared by both the DATA and HIST checks (the two loops were duplicated).
    --   pBucketUri - base URI of the bucket to inspect
    --   pFolder    - folder path relative to the bucket root
    --   pPattern   - LIKE pattern selecting the file type ('%.csv' / '%.parquet')
    --   pTypeLabel - file-type word used in the "[OK] No ... files found" message
    --   pLimited   - TRUE (HIST): fetch at most 10 objects, print only the first
    --                5, and report the count with a trailing '+';
    --                FALSE (DATA): list everything found
    --   pFileCount - running per-bucket counter, incremented for each file found
    -- Errors: ORA-20404 (folder missing/empty) is treated as clean; any other
    -- error is reported but does not abort the verification.
    ---------------------------------------------------------------------------
    PROCEDURE check_folder(pBucketUri IN VARCHAR2,
                           pFolder    IN VARCHAR2,
                           pPattern   IN VARCHAR2,
                           pTypeLabel IN VARCHAR2,
                           pLimited   IN BOOLEAN,
                           pFileCount IN OUT NUMBER) IS
        vCount    NUMBER := 0;
        -- Effective fetch cap: 10 for the limited (HIST) check; an effectively
        -- unbounded cap otherwise (FETCH FIRST requires a non-NULL expression).
        vFetchCap NUMBER := CASE WHEN pLimited THEN 10 ELSE 999999999 END;
    BEGIN
        DBMS_OUTPUT.PUT_LINE('');
        DBMS_OUTPUT.PUT_LINE('Folder: ' || pFolder);

        FOR rec IN (
            SELECT object_name
              FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
                       credential_name => vCredentialName,
                       location_uri    => pBucketUri || pFolder))
             WHERE object_name LIKE pPattern
             FETCH FIRST vFetchCap ROWS ONLY
        ) LOOP
            vCount     := vCount + 1;
            pFileCount := pFileCount + 1;
            IF (NOT pLimited) OR vCount <= 5 THEN
                DBMS_OUTPUT.PUT_LINE(' [FOUND] ' || rec.object_name);
            END IF;
        END LOOP;

        IF vCount = 0 THEN
            DBMS_OUTPUT.PUT_LINE(' [OK] No ' || pTypeLabel || ' files found');
        ELSE
            DBMS_OUTPUT.PUT_LINE(' [INFO] Found ' || vCount
                || CASE WHEN pLimited THEN '+ file(s) (showing first 5)' ELSE ' file(s)' END
                || ' - may be pre-existing files from before installation');
        END IF;
    EXCEPTION
        WHEN OTHERS THEN
            IF SQLCODE = -20404 THEN
                DBMS_OUTPUT.PUT_LINE(' [OK] Folder does not exist or is empty');
            ELSE
                DBMS_OUTPUT.PUT_LINE(' [ERROR] ' || SQLERRM);
            END IF;
    END check_folder;
BEGIN
    -- Get bucket URIs and credential from the central configuration
    vDataBucketUri  := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('DATA');
    vHistBucketUri  := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
    vCredentialName := CT_MRDS.ENV_MANAGER.gvCredentialName;

    DBMS_OUTPUT.PUT_LINE('ROLLBACK VERIFICATION TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
    DBMS_OUTPUT.PUT_LINE('DATA Bucket URI: ' || vDataBucketUri);
    DBMS_OUTPUT.PUT_LINE('HIST Bucket URI: ' || vHistBucketUri);
    DBMS_OUTPUT.PUT_LINE('');

    -- Folder lists: DATA holds the split-export CSVs (2 tables);
    -- HIST holds the archived Parquet exports (all 6 tables).
    vDataFolders := t_folder_list(
        'ODS/CSDB/CSDB_DEBT/',
        'ODS/CSDB/CSDB_DEBT_DAILY/'
    );

    vHistFolders := t_folder_list(
        'ARCHIVE/CSDB/CSDB_DEBT/',
        'ARCHIVE/CSDB/CSDB_DEBT_DAILY/',
        'ARCHIVE/CSDB/CSDB_INSTR_RAT_FULL/',
        'ARCHIVE/CSDB/CSDB_INSTR_DESC_FULL/',
        'ARCHIVE/CSDB/CSDB_ISSUER_RAT_FULL/',
        'ARCHIVE/CSDB/CSDB_ISSUER_DESC_FULL/'
    );

    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    DBMS_OUTPUT.PUT_LINE('Checking DATA Bucket (should be empty)');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');

    -- DATA bucket: list every leftover CSV file
    FOR i IN 1..vDataFolders.COUNT LOOP
        check_folder(vDataBucketUri, vDataFolders(i), '%.csv', 'CSV', FALSE, vDataFileCount);
    END LOOP;

    DBMS_OUTPUT.PUT_LINE('');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    DBMS_OUTPUT.PUT_LINE('Checking HIST Bucket (should be empty)');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');

    -- HIST bucket: sample up to 10 leftover Parquet files per folder
    FOR i IN 1..vHistFolders.COUNT LOOP
        check_folder(vHistBucketUri, vHistFolders(i), '%.parquet', 'Parquet', TRUE, vHistFileCount);
    END LOOP;

    vTotalFiles := vDataFileCount + vHistFileCount;

    DBMS_OUTPUT.PUT_LINE('');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    DBMS_OUTPUT.PUT_LINE('Rollback Verification Summary');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    DBMS_OUTPUT.PUT_LINE('DATA bucket files remaining: ' || vDataFileCount);
    -- HIST counts carry a '+' because each folder was capped at 10 objects
    DBMS_OUTPUT.PUT_LINE('HIST bucket files remaining: ' || vHistFileCount || '+');
    DBMS_OUTPUT.PUT_LINE('Total files found: ' || vTotalFiles || '+');
    DBMS_OUTPUT.PUT_LINE('');

    IF vTotalFiles = 0 THEN
        DBMS_OUTPUT.PUT_LINE('[PASSED] ROLLBACK VERIFICATION PASSED');
        DBMS_OUTPUT.PUT_LINE(' All CSDB export files have been deleted or were not created');
        DBMS_OUTPUT.PUT_LINE(' Buckets are clean and ready for re-export if needed');
    ELSE
        DBMS_OUTPUT.PUT_LINE('[INFO] ROLLBACK VERIFICATION COMPLETED');
        DBMS_OUTPUT.PUT_LINE(' Found ' || vTotalFiles || '+ file(s) remaining in buckets');
        DBMS_OUTPUT.PUT_LINE(' NOTE: These may be pre-existing files from before installation.');
        DBMS_OUTPUT.PUT_LINE(' Rollback only deletes files created during this export operation.');
        DBMS_OUTPUT.PUT_LINE(' If needed, manually verify and clean up remaining files.');
    END IF;

    DBMS_OUTPUT.PUT_LINE('=====================================================================================');

EXCEPTION
    WHEN OTHERS THEN
        DBMS_OUTPUT.PUT_LINE('ERROR during rollback verification: ' || SQLERRM);
        RAISE;
END;
/

PROMPT
PROMPT Rollback verification completed
PROMPT
|
||||
165
MARS_Packages/REL01_ADDITIONS/MARS-835/README.md
Normal file
165
MARS_Packages/REL01_ADDITIONS/MARS-835/README.md
Normal file
@@ -0,0 +1,165 @@
|
||||
# MARS-835: One-Time CSDB Data Export from Operational Database to External Tables
|
||||
|
||||
## Overview
|
||||
This package performs a one-time bulk export of CSDB data from operational database tables (OU_CSDB schema) to new external tables in OCI buckets. The export uses DATA_EXPORTER v2.4.0 with per-column date format handling to move historical data to either DATA bucket (CSV format) or HIST bucket (Parquet format with Hive-style partitioning).
|
||||
|
||||
**Migration Strategy:**
|
||||
- **Split Export (2 tables)**: DEBT, DEBT_DAILY - Last 6 months → DATA (CSV), Older data → HIST (Parquet)
|
||||
- **HIST Only (4 tables)**: INSTR_RAT_FULL, INSTR_DESC_FULL, ISSUER_RAT_FULL, ISSUER_DESC_FULL - All data → HIST (Parquet)
|
||||
|
||||
**Key Transformations:**
|
||||
- Column rename: `A_ETL_LOAD_SET_FK` → `A_WORKFLOW_HISTORY_KEY` (all tables)
|
||||
- Column removal: DEBT (2 columns), DEBT_DAILY (6 columns) not required in new structure
|
||||
|
||||
## Contents
|
||||
- `install_mars835.sql` - Master installation script with SPOOL logging
|
||||
- `rollback_mars835.sql` - Master rollback script
|
||||
- `01_MARS_835_*.sql` - Individual installation scripts
|
||||
- `91_MARS_835_*.sql` - Individual rollback scripts
|
||||
- `track_package_versions.sql` - Package version tracking
|
||||
- `verify_packages_version.sql` - Package verification
|
||||
|
||||
## Prerequisites
|
||||
- Oracle Database 23ai
|
||||
- ADMIN user access (required for all MARS installations)
|
||||
- ENV_MANAGER v3.1.0+
|
||||
- Required schema privileges
|
||||
|
||||
## Installation
|
||||
|
||||
### Option 1: Master Script (Recommended)
|
||||
```powershell
|
||||
# IMPORTANT: Execute as ADMIN user for proper privilege management
# (never commit real credentials - substitute your own password and TNS alias)
Get-Content "MARS_Packages/REL01_ADDITIONS/MARS-835/install_mars835.sql" | sql "ADMIN/<admin_password>@<connect_alias>"
|
||||
|
||||
# Log file created: log/INSTALL_MARS_835_<PDB>_<timestamp>.log
|
||||
```
|
||||
|
||||
### Option 2: Individual Scripts
|
||||
```powershell
|
||||
# IMPORTANT: Execute as ADMIN user
# (never commit real credentials - substitute your own password and TNS alias)
Get-Content "01_MARS_835_*.sql" | sql "ADMIN/<admin_password>@<connect_alias>"
Get-Content "02_MARS_835_*.sql" | sql "ADMIN/<admin_password>@<connect_alias>"
|
||||
# ... etc
|
||||
```
|
||||
|
||||
## Verification
|
||||
```sql
|
||||
-- Verify package versions
|
||||
SELECT PACKAGE_NAME.GET_VERSION() FROM DUAL;
|
||||
|
||||
-- Check for errors (ADMIN user checks specific schema)
|
||||
SELECT * FROM ALL_ERRORS
|
||||
WHERE OWNER = 'CT_MRDS' -- Replace with target schema
|
||||
AND NAME = 'PACKAGE_NAME';
|
||||
|
||||
-- Verify no untracked changes
|
||||
SELECT ENV_MANAGER.CHECK_PACKAGE_CHANGES('CT_MRDS', 'PACKAGE_NAME') FROM DUAL;
|
||||
```
|
||||
|
||||
## Rollback
|
||||
```powershell
|
||||
# IMPORTANT: Execute as ADMIN user
# (never commit real credentials - substitute your own password and TNS alias)
Get-Content "MARS_Packages/REL01_ADDITIONS/MARS-835/rollback_mars835.sql" | sql "ADMIN/<admin_password>@<connect_alias>"
```

**NOTE**: Rollback for data exports is **NOT RECOMMENDED** as it would delete exported files from OCI buckets. Only use rollback if export failed and needs to be restarted.
|
||||
|
||||
## Expected Changes
|
||||
|
||||
### Data Export Summary
|
||||
**6 CSDB tables exported from OU_CSDB schema:**
|
||||
|
||||
**Group 1: Split DATA + HIST (Time Critical)**
|
||||
1. **DEBT** - Last 6 months → DATA, Older → HIST
|
||||
2. **DEBT_DAILY** - Last 6 months → DATA, Older → HIST
|
||||
|
||||
**Group 2: HIST Only (Weekend Bulk)**
|
||||
3. **INSTR_RAT_FULL** - All data → HIST
|
||||
4. **INSTR_DESC_FULL** - All data → HIST
|
||||
5. **ISSUER_RAT_FULL** - All data → HIST
|
||||
6. **ISSUER_DESC_FULL** - All data → HIST
|
||||
|
||||
### Bucket Destinations (DEV environment)
|
||||
- **DATA Bucket**: `mrds_data_dev/ODS/CSDB/` (CSV format)
|
||||
- **HIST Bucket**: `mrds_hist_dev/ARCHIVE/CSDB/` (Parquet with partitioning)
|
||||
|
||||
### Column Mappings
|
||||
- **All tables**: `A_ETL_LOAD_SET_FK` renamed to `A_WORKFLOW_HISTORY_KEY`
|
||||
- **DEBT**: Removed columns: `IDIRDEPOSITORY`, `VA_BONDDURATION`
|
||||
- **DEBT_DAILY**: Removed columns: `STEPID`, `PROGRAMNAME`, `PROGRAMCEILING`, `PROGRAMSTATUS`, `ISSUERNACE21SECTOR`, `INSTRUMENTQUOTATIONBASIS`
|
||||
|
||||
## Testing
|
||||
|
||||
### Post-Export Verification
|
||||
|
||||
1. **Verify CSV files in DATA bucket** (DEBT, DEBT_DAILY - last 6 months):
|
||||
```sql
|
||||
-- Check exported files
|
||||
SELECT object_name, bytes
|
||||
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
||||
credential_name => 'DEF_CRED_ARN',
|
||||
location_uri => 'https://objectstorage.region.oraclecloud.com/n/namespace/b/mrds_data_dev/o/ODS/CSDB/'
|
||||
)) WHERE object_name LIKE '%CSDB_DEBT%';
|
||||
```
|
||||
|
||||
2. **Verify Parquet files in HIST bucket** (all 6 tables):
|
||||
```sql
|
||||
-- Check archived files with Hive partitioning
|
||||
SELECT object_name, bytes
|
||||
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
||||
credential_name => 'DEF_CRED_ARN',
|
||||
location_uri => 'https://objectstorage.region.oraclecloud.com/n/namespace/b/mrds_hist_dev/o/ARCHIVE/CSDB/'
|
||||
)) WHERE object_name LIKE '%PARTITION_YEAR=%';
|
||||
```
|
||||
|
||||
3. **Validate row counts match source tables**:
|
||||
```sql
|
||||
-- Compare counts between source and exported data
|
||||
SELECT COUNT(*) FROM OU_CSDB.DEBT;
|
||||
SELECT COUNT(*) FROM ODS.CSDB_DEBT_ODS; -- External table pointing to DATA
|
||||
SELECT COUNT(*) FROM ODS.CSDB_DEBT_ARCHIVE; -- External table pointing to HIST
|
||||
```
|
||||
|
||||
4. **Verify column mappings**:
|
||||
```sql
|
||||
-- Check A_WORKFLOW_HISTORY_KEY exists in exported data
|
||||
SELECT A_WORKFLOW_HISTORY_KEY, COUNT(*)
|
||||
FROM ODS.CSDB_DEBT_ARCHIVE
|
||||
GROUP BY A_WORKFLOW_HISTORY_KEY;
|
||||
```
|
||||
|
||||
## Known Issues
|
||||
|
||||
### Timing Constraints
|
||||
- **DATA exports (DEBT, DEBT_DAILY)**: Must execute during parallel old+new loads phase after Production deployment
|
||||
- **HIST exports (all 6 tables)**: Can run anytime, recommended for weekend bulk execution to avoid interference
|
||||
|
||||
### Environment-Specific Configuration
|
||||
- Bucket names must be adjusted for each environment:
|
||||
- DEV: `mrds_data_dev`, `mrds_hist_dev`
|
||||
- TEST: `mrds_data_test`, `mrds_hist_test`
|
||||
- PROD: `mrds_data_prod`, `mrds_hist_prod`
|
||||
|
||||
### Data Cutoff Date
|
||||
- Export scripts use 6-month cutoff date calculated as `ADD_MONTHS(SYSDATE, -6)`
|
||||
- Verify cutoff aligns with business requirements before execution
|
||||
|
||||
### One-Time Execution
|
||||
- This is a **ONE-TIME data migration** package
|
||||
- After successful execution, package should be **deactivated** (moved to REL01_POST_DEACTIVATION)
|
||||
- Do not re-run unless explicitly required for data refresh
|
||||
|
||||
## Related
|
||||
- **JIRA**: MARS-835 - CSDB Data Export to External Tables
|
||||
- **Confluence**: FILE_MANAGER package - MRDS - Technical Team
|
||||
- **Confluence**: Table Setup Guide for FILE PROCESSOR System
|
||||
- **Source Schema**: OU_CSDB (Operational Database)
|
||||
- **Target Schema**: ODS (External Tables)
|
||||
- **Migration Type**: One-time bulk export (deactivated post-execution)
|
||||
|
||||
---
|
||||
|
||||
**Author:** Grzegorz Michalski
|
||||
**Date:** 2025-12-04
|
||||
**Version:** 1.0.0
|
||||
@@ -0,0 +1,207 @@
|
||||
# MARS-835: Required External Tables for Smart Column Mapping
|
||||
|
||||
## Overview
|
||||
This document lists all external tables required for MARS-835 data exports using DATA_EXPORTER v2.4.0 with Smart Column Mapping feature.
|
||||
|
||||
**Purpose**: Smart Column Mapping ensures CSV files are generated with columns in the EXACT order expected by external tables, preventing NULL values due to Oracle's positional CSV mapping.
|
||||
|
||||
---
|
||||
|
||||
## Required External Tables
|
||||
|
||||
### Group 1: DATA Bucket (CSV Format) - **CRITICAL**
|
||||
|
||||
#### 1. ODS.CSDB_DEBT_DATA_ODS
|
||||
- **Source Table**: OU_CSDB.LEGACY_DEBT
|
||||
- **Format**: CSV
|
||||
- **Bucket**: DATA (mrds_data_dev/ODS/CSDB/CSDB_DEBT/)
|
||||
- **Key Column Mapping**: A_ETL_LOAD_SET_FK → A_WORKFLOW_HISTORY_KEY (position 2 recommended)
|
||||
- **Critical**: Must use Smart Column Mapping to avoid NULL values in A_WORKFLOW_HISTORY_KEY
|
||||
|
||||
#### 2. ODS.CSDB_DEBT_DAILY_DATA_ODS
|
||||
- **Source Table**: OU_CSDB.LEGACY_DEBT_DAILY
|
||||
- **Format**: CSV
|
||||
- **Bucket**: DATA (mrds_data_dev/ODS/CSDB/CSDB_DEBT_DAILY/)
|
||||
- **Key Column Mapping**: A_ETL_LOAD_SET_FK → A_WORKFLOW_HISTORY_KEY (position 2 recommended)
|
||||
- **Critical**: Must use Smart Column Mapping to avoid NULL values in A_WORKFLOW_HISTORY_KEY
|
||||
|
||||
---
|
||||
|
||||
### Group 2: ARCHIVE Bucket (Parquet Format) - **RECOMMENDED**
|
||||
|
||||
#### 3. ODS.CSDB_DEBT_ARCHIVE
|
||||
- **Source Table**: OU_CSDB.LEGACY_DEBT
|
||||
- **Format**: Parquet with Hive partitioning
|
||||
- **Bucket**: ARCHIVE (mrds_hist_dev/ARCHIVE/CSDB/CSDB_DEBT/)
|
||||
- **Key Column Mapping**: A_ETL_LOAD_SET_FK → A_WORKFLOW_HISTORY_KEY
|
||||
- **Note**: Parquet uses schema-based mapping (column order less critical but Smart Column Mapping ensures consistency)
|
||||
|
||||
#### 4. ODS.CSDB_DEBT_DAILY_ARCHIVE
|
||||
- **Source Table**: OU_CSDB.LEGACY_DEBT_DAILY
|
||||
- **Format**: Parquet with Hive partitioning
|
||||
- **Bucket**: ARCHIVE (mrds_hist_dev/ARCHIVE/CSDB/CSDB_DEBT_DAILY/)
|
||||
- **Key Column Mapping**: A_ETL_LOAD_SET_FK → A_WORKFLOW_HISTORY_KEY
|
||||
|
||||
#### 5. ODS.CSDB_INSTR_RAT_FULL_ARCHIVE
|
||||
- **Source Table**: OU_CSDB.LEGACY_INSTR_RAT_FULL
|
||||
- **Format**: Parquet with Hive partitioning
|
||||
- **Bucket**: ARCHIVE (mrds_hist_dev/ARCHIVE/CSDB/CSDB_INSTR_RAT_FULL/)
|
||||
- **Key Column Mapping**: A_ETL_LOAD_SET_FK → A_WORKFLOW_HISTORY_KEY
|
||||
|
||||
#### 6. ODS.CSDB_INSTR_DESC_FULL_ARCHIVE
|
||||
- **Source Table**: OU_CSDB.LEGACY_INSTR_DESC_FULL
|
||||
- **Format**: Parquet with Hive partitioning
|
||||
- **Bucket**: ARCHIVE (mrds_hist_dev/ARCHIVE/CSDB/CSDB_INSTR_DESC_FULL/)
|
||||
- **Key Column Mapping**: A_ETL_LOAD_SET_FK → A_WORKFLOW_HISTORY_KEY
|
||||
|
||||
#### 7. ODS.CSDB_ISSUER_RAT_FULL_ARCHIVE
|
||||
- **Source Table**: OU_CSDB.LEGACY_ISSUER_RAT_FULL
|
||||
- **Format**: Parquet with Hive partitioning
|
||||
- **Bucket**: ARCHIVE (mrds_hist_dev/ARCHIVE/CSDB/CSDB_ISSUER_RAT_FULL/)
|
||||
- **Key Column Mapping**: A_ETL_LOAD_SET_FK → A_WORKFLOW_HISTORY_KEY
|
||||
|
||||
#### 8. ODS.CSDB_ISSUER_DESC_FULL_ARCHIVE
|
||||
- **Source Table**: OU_CSDB.LEGACY_ISSUER_DESC_FULL
|
||||
- **Format**: Parquet with Hive partitioning
|
||||
- **Bucket**: ARCHIVE (mrds_hist_dev/ARCHIVE/CSDB/CSDB_ISSUER_DESC_FULL/)
|
||||
- **Key Column Mapping**: A_ETL_LOAD_SET_FK → A_WORKFLOW_HISTORY_KEY
|
||||
|
||||
---
|
||||
|
||||
## External Table Column Order Requirements
|
||||
|
||||
### **CRITICAL for CSV Tables** (DATA bucket):
|
||||
|
||||
All CSV external tables MUST have **A_WORKFLOW_HISTORY_KEY at position 2**:
|
||||
|
||||
```
|
||||
Position 1: A_KEY (NUMBER)
|
||||
Position 2: A_WORKFLOW_HISTORY_KEY (NUMBER) ← MUST BE HERE!
|
||||
Position 3+: Other columns in any order
|
||||
```
|
||||
|
||||
**Reason**: Oracle External Tables with CSV format use **positional mapping** (the header row is ignored). If the source table holds A_ETL_LOAD_SET_FK at position 72, a naively generated CSV also carries that value at position 72 — but the external table expects A_WORKFLOW_HISTORY_KEY at position 2, so it reads whatever the CSV has at position 2 (possibly a DATE column) as NUMBER → the conversion fails → NULL value.
|
||||
|
||||
**Solution**: Smart Column Mapping (v2.4.0) generates CSV columns in EXTERNAL TABLE order, ensuring position 2 has the correct NUMBER value.
|
||||
|
||||
### **OPTIONAL for Parquet Tables** (ARCHIVE bucket):
|
||||
|
||||
Parquet format uses **schema-based mapping** (column names). Column order doesn't matter, but Smart Column Mapping provides consistency.
|
||||
|
||||
---
|
||||
|
||||
## Creation Script Example
|
||||
|
||||
### CSV External Table (CRITICAL - Correct Column Order)
|
||||
|
||||
```sql
|
||||
-- Example: ODS.CSDB_DEBT_DATA_ODS
|
||||
-- IMPORTANT: A_WORKFLOW_HISTORY_KEY must be at position 2!
|
||||
|
||||
BEGIN
|
||||
ODS.FILE_MANAGER_ODS.CREATE_EXTERNAL_TABLE(
|
||||
pTableName => 'CSDB_DEBT_DATA_ODS',
|
||||
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_DEBT_TEMPLATE',
|
||||
pPrefix => 'ODS/CSDB/CSDB_DEBT',
|
||||
pBucketUri => CT_MRDS.ENV_MANAGER.gvDataBucketUri,
|
||||
pFormat => 'CSV' -- Uses positional mapping!
|
||||
);
|
||||
END;
|
||||
/
|
||||
|
||||
-- Verify column order (A_WORKFLOW_HISTORY_KEY should be position 2)
|
||||
SELECT column_id, column_name, data_type
|
||||
FROM all_tab_columns
|
||||
WHERE table_name = 'CSDB_DEBT_DATA_ODS'
|
||||
AND owner = 'ODS'
|
||||
ORDER BY column_id;
|
||||
```
|
||||
|
||||
### Parquet External Table (Optional Column Order)
|
||||
|
||||
```sql
|
||||
-- Example: ODS.CSDB_DEBT_ARCHIVE
|
||||
-- Column order flexible (schema-based mapping)
|
||||
|
||||
BEGIN
|
||||
ODS.FILE_MANAGER_ODS.CREATE_EXTERNAL_TABLE(
|
||||
pTableName => 'CSDB_DEBT_ARCHIVE',
|
||||
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_DEBT_TEMPLATE',
|
||||
pPrefix => 'ARCHIVE/CSDB/CSDB_DEBT',
|
||||
pBucketUri => CT_MRDS.ENV_MANAGER.gvArchiveBucketUri,
|
||||
pFormat => 'PARQUET' -- Uses schema-based mapping
|
||||
);
|
||||
END;
|
||||
/
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Template Tables Required
|
||||
|
||||
All external tables require corresponding template tables in CT_ET_TEMPLATES schema:
|
||||
|
||||
- `CT_ET_TEMPLATES.CSDB_DEBT_TEMPLATE`
|
||||
- `CT_ET_TEMPLATES.CSDB_DEBT_DAILY_TEMPLATE`
|
||||
- `CT_ET_TEMPLATES.CSDB_INSTR_RAT_FULL_TEMPLATE`
|
||||
- `CT_ET_TEMPLATES.CSDB_INSTR_DESC_FULL_TEMPLATE`
|
||||
- `CT_ET_TEMPLATES.CSDB_ISSUER_RAT_FULL_TEMPLATE`
|
||||
- `CT_ET_TEMPLATES.CSDB_ISSUER_DESC_FULL_TEMPLATE`
|
||||
|
||||
**Note**: Template tables must be created by ADMIN or CT_ET_TEMPLATES user (MRDS_LOADER cannot create them).
|
||||
|
||||
---
|
||||
|
||||
## Verification Checklist
|
||||
|
||||
Before running MARS-835 exports:
|
||||
|
||||
- [ ] All 8 external tables exist in ODS schema
|
||||
- [ ] CSV tables (DATA bucket) have A_WORKFLOW_HISTORY_KEY at position 2
|
||||
- [ ] Template tables exist in CT_ET_TEMPLATES schema
|
||||
- [ ] MRDS_LOADER has EXECUTE privilege on ODS.FILE_MANAGER_ODS
|
||||
- [ ] ODS schema has access to CT_MRDS.ENV_MANAGER for logging
|
||||
- [ ] DATA_EXPORTER v2.4.0 deployed with Smart Column Mapping feature
|
||||
|
||||
---
|
||||
|
||||
## Testing Verification
|
||||
|
||||
After export, verify A_WORKFLOW_HISTORY_KEY is not NULL:
|
||||
|
||||
```sql
|
||||
-- CSV tables (should be 100% populated)
|
||||
SELECT 'CSDB_DEBT_DATA_ODS' AS TABLE_NAME,
|
||||
COUNT(*) AS TOTAL_ROWS,
|
||||
COUNT(A_WORKFLOW_HISTORY_KEY) AS NON_NULL_COUNT,
|
||||
ROUND(COUNT(A_WORKFLOW_HISTORY_KEY) * 100.0 / NULLIF(COUNT(*), 0), 2) AS SUCCESS_RATE_PCT
|
||||
FROM ODS.CSDB_DEBT_DATA_ODS;
|
||||
|
||||
SELECT 'CSDB_DEBT_DAILY_DATA_ODS' AS TABLE_NAME,
|
||||
COUNT(*) AS TOTAL_ROWS,
|
||||
COUNT(A_WORKFLOW_HISTORY_KEY) AS NON_NULL_COUNT,
|
||||
ROUND(COUNT(A_WORKFLOW_HISTORY_KEY) * 100.0 / NULLIF(COUNT(*), 0), 2) AS SUCCESS_RATE_PCT
|
||||
FROM ODS.CSDB_DEBT_DAILY_DATA_ODS;
|
||||
|
||||
-- Parquet tables (should also be 100% populated)
|
||||
SELECT 'CSDB_DEBT_ARCHIVE' AS TABLE_NAME,
|
||||
COUNT(*) AS TOTAL_ROWS,
|
||||
COUNT(A_WORKFLOW_HISTORY_KEY) AS NON_NULL_COUNT,
|
||||
ROUND(COUNT(A_WORKFLOW_HISTORY_KEY) * 100.0 / NULLIF(COUNT(*), 0), 2) AS SUCCESS_RATE_PCT
|
||||
FROM ODS.CSDB_DEBT_ARCHIVE;
|
||||
```
|
||||
|
||||
**Expected Result**: SUCCESS_RATE_PCT = 100.00 for all tables
|
||||
|
||||
---
|
||||
|
||||
## Related Documentation
|
||||
|
||||
- [DATA_EXPORTER v2.4.0 Smart Column Mapping Examples](../MARS-835-PREHOOK/current_version/v2.3.0/DATA_EXPORTER_v2.4.0_Smart_Column_Mapping_Examples.sql)
|
||||
- [Oracle External Tables Column Order Issue](../../confluence/additions/Oracle_External_Tables_Column_Order_Issue.md)
|
||||
- [MARS-835 README](README.md)
|
||||
|
||||
---
|
||||
|
||||
**Last Updated**: 2026-01-09
|
||||
**Author**: GitHub Copilot (MARS-835 Update)
|
||||
104
MARS_Packages/REL01_ADDITIONS/MARS-835/install_mars835.sql
Normal file
104
MARS_Packages/REL01_ADDITIONS/MARS-835/install_mars835.sql
Normal file
@@ -0,0 +1,104 @@
|
||||
-- ===================================================================
-- MARS-835 INSTALL SCRIPT: CSDB Data Export to External Tables
-- ===================================================================
-- Purpose: One-time bulk export of 6 CSDB tables from OU_CSDB schema
--          to OCI buckets (DATA/CSV and HIST/Parquet formats)
--          Uses DATA_EXPORTER v2.4.0 with per-column date format handling for data migration
-- Author:  Grzegorz Michalski
-- Date:    2025-12-17
-- Version: 1.0.0

-- Dynamic spool file generation (using SYS_CONTEXT - no DBA privileges required)
-- Log files are automatically created in log/ subdirectory
-- IMPORTANT: Ensure log/ directory exists before SPOOL (use host mkdir)
-- NOTE: '2>nul' is Windows cmd syntax; use '2>/dev/null' when running from a Unix shell.
host mkdir log 2>nul

-- Bind variable sized at 255: SYS_CONTEXT('USERENV','CON_NAME') can return up
-- to 128 bytes (long PDB names, 12.2+); with the 'log/INSTALL_MARS_835_'
-- prefix, timestamp and '.log' suffix a 100-byte buffer could raise ORA-06502.
var filename VARCHAR2(255)
BEGIN
    :filename := 'log/INSTALL_MARS_835_' || SYS_CONTEXT('USERENV', 'CON_NAME') || '_' || TO_CHAR(SYSDATE,'YYYYMMDD_HH24MISS') || '.log';
END;
/
-- Transfer the bind variable into a substitution variable usable by SPOOL
column filename new_value _filename
select :filename filename from dual;
spool &_filename

SET ECHO OFF
SET TIMING ON
SET SERVEROUTPUT ON SIZE UNLIMITED
SET PAUSE OFF

-- Set current schema context (optional - use when modifying packages in specific schema)
-- ALTER SESSION SET CURRENT_SCHEMA = CT_MRDS;

PROMPT =========================================================================
PROMPT MARS-835: CSDB Data Export to External Tables (One-Time Migration)
PROMPT =========================================================================
PROMPT
PROMPT This script will export 6 CSDB tables to OCI buckets:
PROMPT
PROMPT GROUP 1 - Split DATA + HIST (Time Critical):
PROMPT - DEBT: Last 6 months to DATA, older to HIST
PROMPT - DEBT_DAILY: Last 6 months to DATA, older to HIST
PROMPT
PROMPT GROUP 2 - HIST Only (Weekend Bulk):
PROMPT - INSTR_RAT_FULL, INSTR_DESC_FULL
PROMPT - ISSUER_RAT_FULL, ISSUER_DESC_FULL
PROMPT
PROMPT Column transformations:
PROMPT - A_ETL_LOAD_SET_FK renamed to A_WORKFLOW_HISTORY_KEY (all tables)
PROMPT - Legacy columns removed from DEBT and DEBT_DAILY
PROMPT
PROMPT Expected Duration: 30-60 minutes (depends on data volume)
PROMPT =========================================================================

-- Confirm installation with user: abort unless the answer is exactly YES
-- (empty input substitutes to '' which IS NULL in Oracle, so Enter aborts too)
ACCEPT continue CHAR PROMPT 'Type YES to continue with installation, or Ctrl+C to abort: '
WHENEVER SQLERROR EXIT SQL.SQLCODE
BEGIN
    IF '&continue' IS NULL OR TRIM('&continue') IS NULL OR UPPER(TRIM('&continue')) != 'YES' THEN
        RAISE_APPLICATION_ERROR(-20001, 'Installation aborted by user');
    END IF;
END;
/
WHENEVER SQLERROR CONTINUE

PROMPT
PROMPT =========================================================================
PROMPT Pre-Check: Verify existing files in DATA and HIST buckets
PROMPT =========================================================================
@@00_MARS_835_pre_check_existing_files.sql

PROMPT
PROMPT =========================================================================
PROMPT Step 1: Export Group 1 - Split DATA + HIST (DEBT, DEBT_DAILY)
PROMPT =========================================================================
@@01_MARS_835_install_step1.sql

PROMPT
PROMPT =========================================================================
PROMPT Step 2: Export Group 2 - HIST Only (4 tables)
PROMPT =========================================================================
@@02_MARS_835_install_step2.sql

PROMPT
PROMPT =========================================================================
PROMPT Step 3: Verify Exports (File Presence Check)
PROMPT =========================================================================
@@03_MARS_835_verify_exports.sql

PROMPT
PROMPT =========================================================================
PROMPT Step 4: Verify Record Counts (Source vs Archive)
PROMPT =========================================================================
@@04_MARS_835_verify_record_counts.sql

PROMPT
PROMPT =========================================================================
PROMPT MARS-835 Installation - COMPLETED
PROMPT =========================================================================
PROMPT Check the log file for complete installation details.
PROMPT =========================================================================

spool off

quit;
|
||||
73
MARS_Packages/REL01_ADDITIONS/MARS-835/rollback_mars835.sql
Normal file
73
MARS_Packages/REL01_ADDITIONS/MARS-835/rollback_mars835.sql
Normal file
@@ -0,0 +1,73 @@
|
||||
-- ===================================================================
-- MARS-835 ROLLBACK SCRIPT: CSDB Data Export Rollback
-- ===================================================================
-- Purpose: Rollback MARS-835 - Delete exported CSV/Parquet files from OCI buckets
-- WARNING: This will DELETE all exported data files!
-- Author:  Grzegorz Michalski
-- Date:    2025-12-17

-- Dynamic spool file generation (using SYS_CONTEXT - no DBA privileges required)
-- IMPORTANT: Ensure log/ directory exists before SPOOL (use host mkdir)
-- NOTE: '2>nul' is Windows cmd syntax; use '2>/dev/null' when running from a Unix shell.
host mkdir log 2>nul

-- Bind variable sized at 255: SYS_CONTEXT('USERENV','CON_NAME') can return up
-- to 128 bytes (long PDB names, 12.2+); with the 'log/ROLLBACK_MARS_835_'
-- prefix, timestamp and '.log' suffix a 100-byte buffer could raise ORA-06502.
var filename VARCHAR2(255)
BEGIN
    :filename := 'log/ROLLBACK_MARS_835_' || SYS_CONTEXT('USERENV', 'CON_NAME') || '_' || TO_CHAR(SYSDATE,'YYYYMMDD_HH24MISS') || '.log';
END;
/
-- Transfer the bind variable into a substitution variable usable by SPOOL
column filename new_value _filename
select :filename filename from dual;
spool &_filename

SET ECHO OFF
SET TIMING ON
SET SERVEROUTPUT ON SIZE UNLIMITED
SET PAUSE OFF

PROMPT =========================================================================
PROMPT MARS-835: Rollback CSDB Data Export
PROMPT =========================================================================
PROMPT WARNING: This will DELETE exported CSV and Parquet files from OCI buckets!
PROMPT - DATA bucket: mrds_data_dev/ODS/CSDB/
PROMPT - HIST bucket: mrds_hist_dev/ARCHIVE/CSDB/
PROMPT
PROMPT Only proceed if export failed and needs to be restarted!
PROMPT =========================================================================

-- Confirm rollback with user: abort unless the answer is exactly YES
-- (empty input substitutes to '' which IS NULL in Oracle, so Enter aborts too)
ACCEPT continue CHAR PROMPT 'Type YES to continue with rollback, or Ctrl+C to abort: '
WHENEVER SQLERROR EXIT SQL.SQLCODE
BEGIN
    IF '&continue' IS NULL OR TRIM('&continue') IS NULL OR UPPER(TRIM('&continue')) != 'YES' THEN
        RAISE_APPLICATION_ERROR(-20001, 'Rollback aborted by user');
    END IF;
END;
/
WHENEVER SQLERROR CONTINUE

-- Rollback steps run in reverse installation order: Group 2 (script 92)
-- is undone before Group 1 (script 91), then the result is verified.
PROMPT
PROMPT =========================================================================
PROMPT Step 1: Delete Group 2 Exported Files (HIST only - 4 tables)
PROMPT =========================================================================
@@92_MARS_835_rollback_step2.sql

PROMPT
PROMPT =========================================================================
PROMPT Step 2: Delete Group 1 Exported Files (DATA + HIST - 2 tables)
PROMPT =========================================================================
@@91_MARS_835_rollback_step1.sql

PROMPT
PROMPT =========================================================================
PROMPT Step 3: Verify Rollback Completed
PROMPT =========================================================================
@@99_MARS_835_verify_rollback.sql

PROMPT
PROMPT =========================================================================
PROMPT MARS-835 Rollback - COMPLETED
PROMPT =========================================================================

spool off

quit;
|
||||
@@ -0,0 +1,92 @@
|
||||
-- ===================================================================
-- Simple Package Version Tracking Script
-- ===================================================================
-- Purpose: Track specified Oracle package versions
-- Author:  Grzegorz Michalski
-- Date:    2025-12-04
-- Version: 3.1.0 - List-Based Edition
--
-- USAGE:
--   1. Edit package list below (add/remove packages as needed)
--   2. Include in your install/rollback script: @@track_package_versions.sql
-- ===================================================================

SET SERVEROUTPUT ON;

DECLARE
    -- One tracked package: owner, name and the version string it reports.
    TYPE t_package_rec IS RECORD (
        owner   VARCHAR2(50),
        name    VARCHAR2(50),
        version VARCHAR2(50)
    );
    TYPE t_packages     IS TABLE OF t_package_rec;
    TYPE t_string_array IS TABLE OF VARCHAR2(100);

    -- ===================================================================
    -- PACKAGE LIST - Edit this array to specify packages to track
    -- ===================================================================
    -- Add or remove entries as needed for your MARS issue
    -- Format: 'SCHEMA.PACKAGE_NAME'
    -- ===================================================================
    vPackageList t_string_array := t_string_array(
        'CT_MRDS.FILE_MANAGER',
        'ODS.FILE_MANAGER_ODS'
    );
    -- ===================================================================

    vPackages    t_packages := t_packages();  -- successfully tracked packages only
    vVersion     VARCHAR2(50);
    vCount       NUMBER := 0;                 -- number of successful TRACK calls
    vOwner       VARCHAR2(50);
    vPackageName VARCHAR2(50);
    vDotPos      NUMBER;
BEGIN
    DBMS_OUTPUT.PUT_LINE('========================================');
    DBMS_OUTPUT.PUT_LINE('Package Version Tracking');
    DBMS_OUTPUT.PUT_LINE('========================================');

    -- Process each package in the list
    FOR i IN 1..vPackageList.COUNT LOOP
        vDotPos := INSTR(vPackageList(i), '.');
        IF vDotPos > 0 THEN
            vOwner       := SUBSTR(vPackageList(i), 1, vDotPos - 1);
            vPackageName := SUBSTR(vPackageList(i), vDotPos + 1);

            BEGIN
                -- Identifier concatenation is safe here: the name comes from
                -- the hard-coded list above, never from user input.
                EXECUTE IMMEDIATE 'SELECT ' || vPackageList(i) || '.GET_VERSION() FROM DUAL'
                    INTO vVersion;

                CT_MRDS.ENV_MANAGER.TRACK_PACKAGE_VERSION(
                    pPackageOwner     => vOwner,
                    pPackageName      => vPackageName,
                    pPackageVersion   => vVersion,
                    pPackageBuildDate => TO_CHAR(SYSDATE, 'YYYY-MM-DD HH24:MI:SS'),
                    pPackageAuthor    => 'Grzegorz Michalski'
                );

                -- Record the package only AFTER tracking succeeded.
                -- (Previously the collection was extended before the TRACK
                -- call, so a tracking failure still left the package listed
                -- in the summary below as if it had been tracked.)
                vPackages.EXTEND;
                vPackages(vPackages.COUNT).owner   := vOwner;
                vPackages(vPackages.COUNT).name    := vPackageName;
                vPackages(vPackages.COUNT).version := vVersion;
                vCount := vCount + 1;
            EXCEPTION
                WHEN OTHERS THEN
                    -- Best-effort per package: report and continue with the rest.
                    DBMS_OUTPUT.PUT_LINE('Error tracking ' || vPackageList(i) || ': ' || SQLERRM);
            END;
        ELSE
            -- Previously skipped silently; make the bad list entry visible.
            DBMS_OUTPUT.PUT_LINE('Skipping malformed entry (expected SCHEMA.PACKAGE): ' || vPackageList(i));
        END IF;
    END LOOP;

    -- Display results
    IF vPackages.COUNT > 0 THEN
        DBMS_OUTPUT.PUT_LINE('Packages tracked: ' || vCount || ' of ' || vPackageList.COUNT);
        FOR i IN 1..vPackages.COUNT LOOP
            DBMS_OUTPUT.PUT_LINE('  ' || vPackages(i).owner || '.' || vPackages(i).name ||
                                 ' (v' || vPackages(i).version || ')');
        END LOOP;
    ELSE
        DBMS_OUTPUT.PUT_LINE('No packages found in list');
    END IF;

    DBMS_OUTPUT.PUT_LINE('========================================');
END;
/
|
||||
@@ -0,0 +1,62 @@
|
||||
-- ===================================================================
-- Universal Package Version Verification Script
-- ===================================================================
-- Purpose: Verify all tracked Oracle packages for code changes
-- Author:  Grzegorz Michalski
-- Date:    2025-12-04
-- Version: 1.0.0
--
-- USAGE:
--   Include at the end of install/rollback scripts: @@verify_packages_version.sql
--
-- OUTPUT:
--   - List of all tracked packages with their current status
--   - OK: Package has not changed since last tracking
--   - WARNING: Package code changed without version update
-- ===================================================================

SET LINESIZE 200
SET PAGESIZE 1000
SET FEEDBACK OFF

PROMPT
PROMPT ========================================
PROMPT Package Version Verification
PROMPT ========================================
PROMPT

COLUMN PACKAGE_OWNER FORMAT A15
COLUMN PACKAGE_NAME FORMAT A20
COLUMN VERSION FORMAT A10
COLUMN STATUS FORMAT A80

-- For every tracked package, pick its newest tracking row and ask
-- ENV_MANAGER whether the code changed since that snapshot.
WITH latest_tracking AS (
    SELECT
        PACKAGE_OWNER,
        PACKAGE_NAME,
        PACKAGE_VERSION,
        ROW_NUMBER() OVER (
            PARTITION BY PACKAGE_OWNER, PACKAGE_NAME
            ORDER BY TRACKING_DATE DESC
        ) AS tracking_rank
    FROM CT_MRDS.A_PACKAGE_VERSION_TRACKING
)
SELECT
    PACKAGE_OWNER,
    PACKAGE_NAME,
    PACKAGE_VERSION AS VERSION,
    CT_MRDS.ENV_MANAGER.CHECK_PACKAGE_CHANGES(PACKAGE_OWNER, PACKAGE_NAME) AS STATUS
FROM latest_tracking
WHERE tracking_rank = 1
ORDER BY PACKAGE_OWNER, PACKAGE_NAME;

PROMPT
PROMPT ========================================
PROMPT Verification Complete
PROMPT ========================================
PROMPT
PROMPT Legend:
PROMPT   OK      - Package has not changed since last tracking
PROMPT   WARNING - Package code changed without version update
PROMPT
PROMPT For detailed hash information, use:
PROMPT   SELECT ENV_MANAGER.GET_PACKAGE_HASH_INFO('OWNER', 'PACKAGE') FROM DUAL;
PROMPT ========================================

SET FEEDBACK ON
|
||||
Reference in New Issue
Block a user