Compare commits

...

8 Commits

Author SHA1 Message Date
Grzegorz Michalski
5c77d42d9a Update author information to Grzegorz Michalski in MARS-956 SQL scripts and remove pre-check script from installation process 2026-02-13 19:42:02 +01:00
Grzegorz Michalski
d370b9c9ef Update DATA_EXPORTER package to version 2.10.0 with multi-file support for DBMS_CLOUD.EXPORT_DATA, preventing orphaned files in rollback processes. 2026-02-13 19:08:54 +01:00
Grzegorz Michalski
c5d39fc01d Add ENV_MANAGER package with comprehensive error handling and logging features
- Introduced ENV_MANAGER package for managing environment configurations and error handling.
- Implemented detailed error codes and messages for various scenarios, enhancing debugging capabilities.
- Added logging procedures to track process events and errors, with configurable logging levels.
- Included version management functions to track package versioning and changes.
- Established a structure for documenting functions and procedures for better maintainability.
2026-02-13 13:56:36 +01:00
Grzegorz Michalski
80916ea302 Refactor SQL scripts to replace MRDS_LOADER.cloud_wrapper.list_objects with DBMS_CLOUD.LIST_OBJECTS for improved compatibility 2026-02-13 12:39:03 +01:00
Grzegorz Michalski
a4ab56e96d Enhance MARS-835 rollback process with new scripts for file registration deletion and manual cleanup, and add process tracking identifier for exports 2026-02-13 12:36:45 +01:00
Grzegorz Michalski
92c5261215 Revert "Add process tracking identifier to MARS-835 export and rollback scripts"
This reverts commit 2c40f091e0.
2026-02-13 09:45:49 +01:00
Grzegorz Michalski
b34c942a8f Revert "Enhance MARS-826 Rollback Scripts and Export Procedures"
This reverts commit d6c34085f7.
2026-02-13 09:24:02 +01:00
Grzegorz Michalski
d175179ddc Update DATA_EXPORTER package version to 2.9.0 and enhance version history for process tracking improvements 2026-02-13 09:21:06 +01:00
43 changed files with 4681 additions and 848 deletions

View File

@@ -24,9 +24,7 @@ BEGIN
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
pBucketArea => 'ARCHIVE',
pFolderName => 'ARCHIVE/LM/LM_ADHOC_ADJUSTMENTS_HEADER',
pParallelDegree => 1,
pRegisterExport => TRUE,
pProcessName => 'MARS-826'
pParallelDegree => 1
);
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_ADHOC_ADJ_HEADER exported');
EXCEPTION
@@ -46,9 +44,7 @@ BEGIN
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
pBucketArea => 'ARCHIVE',
pFolderName => 'ARCHIVE/LM/LM_ADHOC_ADJUSTMENTS_ITEM',
pParallelDegree => 1,
pRegisterExport => TRUE,
pProcessName => 'MARS-826'
pParallelDegree => 1
);
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_ADHOC_ADJ_ITEM exported');
EXCEPTION
@@ -68,9 +64,7 @@ BEGIN
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
pBucketArea => 'ARCHIVE',
pFolderName => 'ARCHIVE/LM/LM_ADHOC_ADJUSTMENTS_ITEM_HEADER',
pParallelDegree => 1,
pRegisterExport => TRUE,
pProcessName => 'MARS-826'
pParallelDegree => 1
);
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_ADHOC_ADJ_ITEM_HEADER exported');
EXCEPTION

View File

@@ -29,9 +29,7 @@ BEGIN
pKeyColumnName => 'A_ETL_LOAD_SET_KEY',
pBucketArea => 'ARCHIVE',
pFolderName => 'ARCHIVE/LM/LM_BALANCESHEET_HEADER',
pParallelDegree => 4,
pRegisterExport => TRUE,
pProcessName => 'MARS-826'
pParallelDegree => 4
);
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_BALANCESHEET_HEADER exported');
EXCEPTION
@@ -51,9 +49,7 @@ BEGIN
pKeyColumnName => 'A_ETL_LOAD_SET_KEY',
pBucketArea => 'ARCHIVE',
pFolderName => 'ARCHIVE/LM/LM_BALANCESHEET_ITEM',
pParallelDegree => 16,
pRegisterExport => TRUE,
pProcessName => 'MARS-826'
pParallelDegree => 16
);
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_BALANCESHEET_ITEM exported');
EXCEPTION

View File

@@ -24,9 +24,7 @@ BEGIN
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
pBucketArea => 'ARCHIVE',
pFolderName => 'ARCHIVE/LM/LM_CSM_ADJUSTMENTS_HEADER',
pParallelDegree => 1,
pRegisterExport => TRUE,
pProcessName => 'MARS-826'
pParallelDegree => 1
);
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_CSM_ADJ_HEADER exported');
EXCEPTION
@@ -46,9 +44,7 @@ BEGIN
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
pBucketArea => 'ARCHIVE',
pFolderName => 'ARCHIVE/LM/LM_CSM_ADJUSTMENTS_ITEM',
pParallelDegree => 2,
pRegisterExport => TRUE,
pProcessName => 'MARS-826'
pParallelDegree => 2
);
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_CSM_ADJ_ITEM exported');
EXCEPTION
@@ -68,9 +64,7 @@ BEGIN
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
pBucketArea => 'ARCHIVE',
pFolderName => 'ARCHIVE/LM/LM_CSM_ADJUSTMENTS_ITEM_HEADER',
pParallelDegree => 2,
pRegisterExport => TRUE,
pProcessName => 'MARS-826'
pParallelDegree => 2
);
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_CSM_ADJ_ITEM_HEADER exported');
EXCEPTION

View File

@@ -29,9 +29,7 @@ BEGIN
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
pBucketArea => 'ARCHIVE',
pFolderName => 'ARCHIVE/LM/LM_STANDING_FACILITIES',
pParallelDegree => 8,
pRegisterExport => TRUE,
pProcessName => 'MARS-826'
pParallelDegree => 8
);
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_STANDING_FACILITY exported');
EXCEPTION
@@ -51,9 +49,7 @@ BEGIN
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
pBucketArea => 'ARCHIVE',
pFolderName => 'ARCHIVE/LM/LM_STANDING_FACILITIES_HEADER',
pParallelDegree => 2,
pRegisterExport => TRUE,
pProcessName => 'MARS-826'
pParallelDegree => 2
);
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_STANDING_FACILITY_HEADER exported');
EXCEPTION

View File

@@ -25,9 +25,7 @@ BEGIN
pKeyColumnName => 'A_ETL_LOAD_SET_KEY',
pBucketArea => 'ARCHIVE',
pFolderName => 'ARCHIVE/LM/LM_CURRENT_ACCOUNTS_HEADER',
pParallelDegree => 2,
pRegisterExport => TRUE,
pProcessName => 'MARS-826'
pParallelDegree => 2
);
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_MRR_IND_CURRENT_ACCOUNT_HEADER exported');
EXCEPTION
@@ -47,9 +45,7 @@ BEGIN
pKeyColumnName => 'A_ETL_LOAD_SET_KEY',
pBucketArea => 'ARCHIVE',
pFolderName => 'ARCHIVE/LM/LM_CURRENT_ACCOUNTS_ITEM',
pParallelDegree => 16,
pRegisterExport => TRUE,
pProcessName => 'MARS-826'
pParallelDegree => 16
);
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_MRR_IND_CURRENT_ACCOUNT_ITEM exported');
EXCEPTION

View File

@@ -29,9 +29,7 @@ BEGIN
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
pBucketArea => 'ARCHIVE',
pFolderName => 'ARCHIVE/LM/LM_FORECAST_HEADER',
pParallelDegree => 4,
pRegisterExport => TRUE,
pProcessName => 'MARS-826'
pParallelDegree => 4
);
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_FORECAST_HEADER exported');
EXCEPTION
@@ -51,9 +49,7 @@ BEGIN
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
pBucketArea => 'ARCHIVE',
pFolderName => 'ARCHIVE/LM/LM_FORECAST_ITEM',
pParallelDegree => 16,
pRegisterExport => TRUE,
pProcessName => 'MARS-826'
pParallelDegree => 16
);
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_FORECAST_ITEM exported');
EXCEPTION

View File

@@ -24,9 +24,7 @@ BEGIN
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
pBucketArea => 'ARCHIVE',
pFolderName => 'ARCHIVE/LM/LM_QRE_ADJUSTMENTS_HEADER',
pParallelDegree => 1,
pRegisterExport => TRUE,
pProcessName => 'MARS-826'
pParallelDegree => 1
);
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_QR_ADJ_HEADER exported');
EXCEPTION
@@ -46,9 +44,7 @@ BEGIN
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
pBucketArea => 'ARCHIVE',
pFolderName => 'ARCHIVE/LM/LM_QRE_ADJUSTMENTS_ITEM',
pParallelDegree => 4,
pRegisterExport => TRUE,
pProcessName => 'MARS-826'
pParallelDegree => 4
);
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_QR_ADJ_ITEM exported');
EXCEPTION
@@ -68,9 +64,7 @@ BEGIN
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
pBucketArea => 'ARCHIVE',
pFolderName => 'ARCHIVE/LM/LM_QRE_ADJUSTMENTS_ITEM_HEADER',
pParallelDegree => 2,
pRegisterExport => TRUE,
pProcessName => 'MARS-826'
pParallelDegree => 2
);
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_QR_ADJ_ITEM_HEADER exported');
EXCEPTION

View File

@@ -24,9 +24,7 @@ BEGIN
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
pBucketArea => 'ARCHIVE',
pFolderName => 'ARCHIVE/LM/LM_TTS_HEADER',
pParallelDegree => 1,
pRegisterExport => TRUE,
pProcessName => 'MARS-826'
pParallelDegree => 1
);
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_TTS_HEADER exported');
EXCEPTION
@@ -46,9 +44,7 @@ BEGIN
pKeyColumnName => 'A_ETL_LOAD_SET_FK',
pBucketArea => 'ARCHIVE',
pFolderName => 'ARCHIVE/LM/LM_TTS_ITEM',
pParallelDegree => 1,
pRegisterExport => TRUE,
pProcessName => 'MARS-826'
pParallelDegree => 1
);
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_TTS_ITEM exported');
EXCEPTION

View File

@@ -1,46 +0,0 @@
-- =====================================================================================
-- Script: 90_MARS_826_rollback_file_registrations.sql
-- Purpose: Delete file registration records for MARS-826 from A_SOURCE_FILE_RECEIVED
-- Author: Grzegorz Michalski
-- Created: 2026-02-13
-- MARS Issue: MARS-826
-- Target Table: CT_MRDS.A_SOURCE_FILE_RECEIVED
-- =====================================================================================
-- SQL*Plus session settings: enable unlimited DBMS_OUTPUT buffering so the full
-- progress log is visible, and keep row-count feedback on for the DELETE.
SET SERVEROUTPUT ON SIZE UNLIMITED;
SET FEEDBACK ON;
PROMPT =====================================================================================
PROMPT MARS-826 Rollback: Deleting file registration records from A_SOURCE_FILE_RECEIVED
PROMPT =====================================================================================
-- Anonymous PL/SQL block: performs the scoped delete, reports the affected row
-- count, and commits. Any error rolls the delete back and re-raises so the
-- calling deployment pipeline sees a failure.
DECLARE
-- Number of rows removed by the DELETE (captured from SQL%ROWCOUNT).
vDeletedCount NUMBER := 0;
BEGIN
DBMS_OUTPUT.PUT_LINE('START TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
DBMS_OUTPUT.PUT_LINE('');
DBMS_OUTPUT.PUT_LINE('Deleting file registrations for PROCESS_NAME = ''MARS-826''...');
-- Delete all records registered by MARS-826 process
-- NOTE: scope is intentionally the whole process, not individual tables; every
-- registration row tagged with PROCESS_NAME = 'MARS-826' is removed.
DELETE FROM CT_MRDS.A_SOURCE_FILE_RECEIVED
WHERE PROCESS_NAME = 'MARS-826';
-- SQL%ROWCOUNT must be read before COMMIT resets the implicit cursor state.
vDeletedCount := SQL%ROWCOUNT;
COMMIT;
DBMS_OUTPUT.PUT_LINE('');
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
DBMS_OUTPUT.PUT_LINE('ROLLBACK COMPLETE: Deleted ' || vDeletedCount || ' file registration record(s)');
DBMS_OUTPUT.PUT_LINE('END TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
EXCEPTION
WHEN OTHERS THEN
-- Undo the partial delete, log the error with a timestamp, then re-raise
-- so the script exits with a failure status rather than silently succeeding.
ROLLBACK;
DBMS_OUTPUT.PUT_LINE('');
DBMS_OUTPUT.PUT_LINE('FATAL ERROR: ' || SQLERRM);
DBMS_OUTPUT.PUT_LINE('Error occurred at: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
RAISE;
END;
/

View File

@@ -23,6 +23,11 @@ DECLARE
vBucketUri VARCHAR2(500);
vCredentialName VARCHAR2(100);
vFileCount NUMBER := 0;
vDeletedCount NUMBER := 0;
vTotalDeleted NUMBER := 0;
TYPE t_folder_list IS TABLE OF VARCHAR2(200);
vFolders t_folder_list;
BEGIN
-- Get bucket URI and credential from FILE_MANAGER configuration
vBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
@@ -30,45 +35,56 @@ BEGIN
DBMS_OUTPUT.PUT_LINE('START TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
DBMS_OUTPUT.PUT_LINE('');
DBMS_OUTPUT.PUT_LINE('Deleting ADHOC_ADJ files registered by MARS-826 process...');
DBMS_OUTPUT.PUT_LINE(' Using A_SOURCE_FILE_RECEIVED with PROCESS_NAME = ''MARS-826''');
-- Delete files registered by MARS-826 process for ADHOC_ADJ tables
FOR rec IN (
SELECT SOURCE_FILE_NAME AS object_name
FROM CT_MRDS.A_SOURCE_FILE_RECEIVED
WHERE PROCESS_NAME = 'MARS-826'
AND (SOURCE_FILE_NAME LIKE '%ADHOC_ADJUSTMENTS%')
ORDER BY SOURCE_FILE_NAME
) LOOP
vFileCount := vFileCount + 1;
-- Initialize folder list for ADHOC_ADJ tables
vFolders := t_folder_list(
'ARCHIVE/LM/LM_ADHOC_ADJUSTMENTS_HEADER/',
'ARCHIVE/LM/LM_ADHOC_ADJUSTMENTS_ITEM/',
'ARCHIVE/LM/LM_ADHOC_ADJUSTMENTS_ITEM_HEADER/'
);
-- Process each folder
FOR i IN 1..vFolders.COUNT LOOP
DBMS_OUTPUT.PUT_LINE('Processing folder: ' || vFolders(i));
vFileCount := 0;
vDeletedCount := 0;
BEGIN
-- Delete the file
DBMS_CLOUD.DELETE_OBJECT(
-- List and delete all Parquet files in the folder
FOR rec IN (
SELECT object_name
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
credential_name => vCredentialName,
object_uri => vBucketUri || rec.object_name
);
location_uri => vBucketUri || vFolders(i)
))
WHERE object_name LIKE '%.parquet'
ORDER BY object_name
) LOOP
vFileCount := vFileCount + 1;
DBMS_OUTPUT.PUT_LINE(' [' || vFileCount || '] Deleted: ' || rec.object_name);
EXCEPTION
WHEN OTHERS THEN
IF SQLCODE = -20404 THEN
DBMS_OUTPUT.PUT_LINE(' Skipped (not found): ' || rec.object_name);
ELSE
BEGIN
-- Delete the Parquet file
DBMS_CLOUD.DELETE_OBJECT(
credential_name => vCredentialName,
object_uri => vBucketUri || vFolders(i) || rec.object_name
);
vDeletedCount := vDeletedCount + 1;
DBMS_OUTPUT.PUT_LINE(' [' || vDeletedCount || '] Deleted: ' || rec.object_name);
EXCEPTION
WHEN OTHERS THEN
DBMS_OUTPUT.PUT_LINE(' ERROR deleting ' || rec.object_name || ': ' || SQLERRM);
END IF;
END;
END;
END LOOP;
vTotalDeleted := vTotalDeleted + vDeletedCount;
DBMS_OUTPUT.PUT_LINE('Folder summary: Found ' || vFileCount || ' files, deleted ' || vDeletedCount || ' files');
DBMS_OUTPUT.PUT_LINE('');
END LOOP;
IF vFileCount = 0 THEN
DBMS_OUTPUT.PUT_LINE(' INFO: No ADHOC_ADJ files found to delete');
END IF;
DBMS_OUTPUT.PUT_LINE('');
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
DBMS_OUTPUT.PUT_LINE('ROLLBACK COMPLETE: Total files deleted: ' || vFileCount);
DBMS_OUTPUT.PUT_LINE('ROLLBACK COMPLETE: Total files deleted: ' || vTotalDeleted);
DBMS_OUTPUT.PUT_LINE('END TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
DBMS_OUTPUT.PUT_LINE('=====================================================================================');

View File

@@ -22,6 +22,11 @@ DECLARE
vBucketUri VARCHAR2(500);
vCredentialName VARCHAR2(100);
vFileCount NUMBER := 0;
vDeletedCount NUMBER := 0;
vTotalDeleted NUMBER := 0;
TYPE t_folder_list IS TABLE OF VARCHAR2(200);
vFolders t_folder_list;
BEGIN
-- Get bucket URI and credential from FILE_MANAGER configuration
vBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
@@ -29,45 +34,55 @@ BEGIN
DBMS_OUTPUT.PUT_LINE('START TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
DBMS_OUTPUT.PUT_LINE('');
DBMS_OUTPUT.PUT_LINE('Deleting BALANCESHEET files registered by MARS-826 process...');
DBMS_OUTPUT.PUT_LINE(' Using A_SOURCE_FILE_RECEIVED with PROCESS_NAME = ''MARS-826''');
-- Delete files registered by MARS-826 process for BALANCESHEET tables
FOR rec IN (
SELECT SOURCE_FILE_NAME AS object_name
FROM CT_MRDS.A_SOURCE_FILE_RECEIVED
WHERE PROCESS_NAME = 'MARS-826'
AND (SOURCE_FILE_NAME LIKE '%BALANCESHEET%')
ORDER BY SOURCE_FILE_NAME
) LOOP
vFileCount := vFileCount + 1;
-- Initialize folder list for BALANCESHEET tables
vFolders := t_folder_list(
'ARCHIVE/LM/LM_BALANCESHEET_HEADER/',
'ARCHIVE/LM/LM_BALANCESHEET_ITEM/'
);
-- Process each folder
FOR i IN 1..vFolders.COUNT LOOP
DBMS_OUTPUT.PUT_LINE('Processing folder: ' || vFolders(i));
vFileCount := 0;
vDeletedCount := 0;
BEGIN
-- Delete the file
DBMS_CLOUD.DELETE_OBJECT(
-- List and delete all Parquet files in the folder
FOR rec IN (
SELECT object_name
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
credential_name => vCredentialName,
object_uri => vBucketUri || rec.object_name
);
location_uri => vBucketUri || vFolders(i)
))
WHERE object_name LIKE '%.parquet'
ORDER BY object_name
) LOOP
vFileCount := vFileCount + 1;
DBMS_OUTPUT.PUT_LINE(' [' || vFileCount || '] Deleted: ' || rec.object_name);
EXCEPTION
WHEN OTHERS THEN
IF SQLCODE = -20404 THEN
DBMS_OUTPUT.PUT_LINE(' Skipped (not found): ' || rec.object_name);
ELSE
BEGIN
-- Delete the Parquet file
DBMS_CLOUD.DELETE_OBJECT(
credential_name => vCredentialName,
object_uri => vBucketUri || vFolders(i) || rec.object_name
);
vDeletedCount := vDeletedCount + 1;
DBMS_OUTPUT.PUT_LINE(' [' || vDeletedCount || '] Deleted: ' || rec.object_name);
EXCEPTION
WHEN OTHERS THEN
DBMS_OUTPUT.PUT_LINE(' ERROR deleting ' || rec.object_name || ': ' || SQLERRM);
END IF;
END;
END;
END LOOP;
vTotalDeleted := vTotalDeleted + vDeletedCount;
DBMS_OUTPUT.PUT_LINE('Folder summary: Found ' || vFileCount || ' files, deleted ' || vDeletedCount || ' files');
DBMS_OUTPUT.PUT_LINE('');
END LOOP;
IF vFileCount = 0 THEN
DBMS_OUTPUT.PUT_LINE(' INFO: No BALANCESHEET files found to delete');
END IF;
DBMS_OUTPUT.PUT_LINE('');
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
DBMS_OUTPUT.PUT_LINE('ROLLBACK COMPLETE: Total files deleted: ' || vFileCount);
DBMS_OUTPUT.PUT_LINE('ROLLBACK COMPLETE: Total files deleted: ' || vTotalDeleted);
DBMS_OUTPUT.PUT_LINE('END TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
DBMS_OUTPUT.PUT_LINE('=====================================================================================');

View File

@@ -23,6 +23,11 @@ DECLARE
vBucketUri VARCHAR2(500);
vCredentialName VARCHAR2(100);
vFileCount NUMBER := 0;
vDeletedCount NUMBER := 0;
vTotalDeleted NUMBER := 0;
TYPE t_folder_list IS TABLE OF VARCHAR2(200);
vFolders t_folder_list;
BEGIN
-- Get bucket URI and credential from FILE_MANAGER configuration
vBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
@@ -30,45 +35,56 @@ BEGIN
DBMS_OUTPUT.PUT_LINE('START TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
DBMS_OUTPUT.PUT_LINE('');
DBMS_OUTPUT.PUT_LINE('Deleting CSM_ADJ files registered by MARS-826 process...');
DBMS_OUTPUT.PUT_LINE(' Using A_SOURCE_FILE_RECEIVED with PROCESS_NAME = ''MARS-826''');
-- Delete files registered by MARS-826 process for CSM_ADJ tables
FOR rec IN (
SELECT SOURCE_FILE_NAME AS object_name
FROM CT_MRDS.A_SOURCE_FILE_RECEIVED
WHERE PROCESS_NAME = 'MARS-826'
AND (SOURCE_FILE_NAME LIKE '%CSM_ADJUSTMENTS%')
ORDER BY SOURCE_FILE_NAME
) LOOP
vFileCount := vFileCount + 1;
-- Initialize folder list for CSM_ADJ tables
vFolders := t_folder_list(
'ARCHIVE/LM/LM_CSM_ADJUSTMENTS_HEADER/',
'ARCHIVE/LM/LM_CSM_ADJUSTMENTS_ITEM/',
'ARCHIVE/LM/LM_CSM_ADJUSTMENTS_ITEM_HEADER/'
);
-- Process each folder
FOR i IN 1..vFolders.COUNT LOOP
DBMS_OUTPUT.PUT_LINE('Processing folder: ' || vFolders(i));
vFileCount := 0;
vDeletedCount := 0;
BEGIN
-- Delete the file
DBMS_CLOUD.DELETE_OBJECT(
-- List and delete all Parquet files in the folder
FOR rec IN (
SELECT object_name
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
credential_name => vCredentialName,
object_uri => vBucketUri || rec.object_name
);
location_uri => vBucketUri || vFolders(i)
))
WHERE object_name LIKE '%.parquet'
ORDER BY object_name
) LOOP
vFileCount := vFileCount + 1;
DBMS_OUTPUT.PUT_LINE(' [' || vFileCount || '] Deleted: ' || rec.object_name);
EXCEPTION
WHEN OTHERS THEN
IF SQLCODE = -20404 THEN
DBMS_OUTPUT.PUT_LINE(' Skipped (not found): ' || rec.object_name);
ELSE
BEGIN
-- Delete the Parquet file
DBMS_CLOUD.DELETE_OBJECT(
credential_name => vCredentialName,
object_uri => vBucketUri || vFolders(i) || rec.object_name
);
vDeletedCount := vDeletedCount + 1;
DBMS_OUTPUT.PUT_LINE(' [' || vDeletedCount || '] Deleted: ' || rec.object_name);
EXCEPTION
WHEN OTHERS THEN
DBMS_OUTPUT.PUT_LINE(' ERROR deleting ' || rec.object_name || ': ' || SQLERRM);
END IF;
END;
END;
END LOOP;
vTotalDeleted := vTotalDeleted + vDeletedCount;
DBMS_OUTPUT.PUT_LINE('Folder summary: Found ' || vFileCount || ' files, deleted ' || vDeletedCount || ' files');
DBMS_OUTPUT.PUT_LINE('');
END LOOP;
IF vFileCount = 0 THEN
DBMS_OUTPUT.PUT_LINE(' INFO: No CSM_ADJ files found to delete');
END IF;
DBMS_OUTPUT.PUT_LINE('');
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
DBMS_OUTPUT.PUT_LINE('ROLLBACK COMPLETE: Total files deleted: ' || vFileCount);
DBMS_OUTPUT.PUT_LINE('ROLLBACK COMPLETE: Total files deleted: ' || vTotalDeleted);
DBMS_OUTPUT.PUT_LINE('END TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
DBMS_OUTPUT.PUT_LINE('=====================================================================================');

View File

@@ -22,6 +22,11 @@ DECLARE
vBucketUri VARCHAR2(500);
vCredentialName VARCHAR2(100);
vFileCount NUMBER := 0;
vDeletedCount NUMBER := 0;
vTotalDeleted NUMBER := 0;
TYPE t_folder_list IS TABLE OF VARCHAR2(200);
vFolders t_folder_list;
BEGIN
-- Get bucket URI and credential from FILE_MANAGER configuration
vBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
@@ -29,45 +34,55 @@ BEGIN
DBMS_OUTPUT.PUT_LINE('START TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
DBMS_OUTPUT.PUT_LINE('');
DBMS_OUTPUT.PUT_LINE('Deleting STANDING_FACILITY files registered by MARS-826 process...');
DBMS_OUTPUT.PUT_LINE(' Using A_SOURCE_FILE_RECEIVED with PROCESS_NAME = ''MARS-826''');
-- Delete files registered by MARS-826 process for STANDING_FACILITY tables
FOR rec IN (
SELECT SOURCE_FILE_NAME AS object_name
FROM CT_MRDS.A_SOURCE_FILE_RECEIVED
WHERE PROCESS_NAME = 'MARS-826'
AND (SOURCE_FILE_NAME LIKE '%STANDING_FACILITIES%')
ORDER BY SOURCE_FILE_NAME
) LOOP
vFileCount := vFileCount + 1;
-- Initialize folder list for STANDING_FACILITY tables
vFolders := t_folder_list(
'ARCHIVE/LM/LM_STANDING_FACILITIES/',
'ARCHIVE/LM/LM_STANDING_FACILITIES_HEADER/'
);
-- Process each folder
FOR i IN 1..vFolders.COUNT LOOP
DBMS_OUTPUT.PUT_LINE('Processing folder: ' || vFolders(i));
vFileCount := 0;
vDeletedCount := 0;
BEGIN
-- Delete the file
DBMS_CLOUD.DELETE_OBJECT(
-- List and delete all Parquet files in the folder
FOR rec IN (
SELECT object_name
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
credential_name => vCredentialName,
object_uri => vBucketUri || rec.object_name
);
location_uri => vBucketUri || vFolders(i)
))
WHERE object_name LIKE '%.parquet'
ORDER BY object_name
) LOOP
vFileCount := vFileCount + 1;
DBMS_OUTPUT.PUT_LINE(' [' || vFileCount || '] Deleted: ' || rec.object_name);
EXCEPTION
WHEN OTHERS THEN
IF SQLCODE = -20404 THEN
DBMS_OUTPUT.PUT_LINE(' Skipped (not found): ' || rec.object_name);
ELSE
BEGIN
-- Delete the Parquet file
DBMS_CLOUD.DELETE_OBJECT(
credential_name => vCredentialName,
object_uri => vBucketUri || vFolders(i) || rec.object_name
);
vDeletedCount := vDeletedCount + 1;
DBMS_OUTPUT.PUT_LINE(' [' || vDeletedCount || '] Deleted: ' || rec.object_name);
EXCEPTION
WHEN OTHERS THEN
DBMS_OUTPUT.PUT_LINE(' ERROR deleting ' || rec.object_name || ': ' || SQLERRM);
END IF;
END;
END;
END LOOP;
vTotalDeleted := vTotalDeleted + vDeletedCount;
DBMS_OUTPUT.PUT_LINE('Folder summary: Found ' || vFileCount || ' files, deleted ' || vDeletedCount || ' files');
DBMS_OUTPUT.PUT_LINE('');
END LOOP;
IF vFileCount = 0 THEN
DBMS_OUTPUT.PUT_LINE(' INFO: No STANDING_FACILITY files found to delete');
END IF;
DBMS_OUTPUT.PUT_LINE('');
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
DBMS_OUTPUT.PUT_LINE('ROLLBACK COMPLETE: Total files deleted: ' || vFileCount);
DBMS_OUTPUT.PUT_LINE('ROLLBACK COMPLETE: Total files deleted: ' || vTotalDeleted);
DBMS_OUTPUT.PUT_LINE('END TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
DBMS_OUTPUT.PUT_LINE('=====================================================================================');

View File

@@ -22,6 +22,11 @@ DECLARE
vBucketUri VARCHAR2(500);
vCredentialName VARCHAR2(100);
vFileCount NUMBER := 0;
vDeletedCount NUMBER := 0;
vTotalDeleted NUMBER := 0;
TYPE t_folder_list IS TABLE OF VARCHAR2(200);
vFolders t_folder_list;
BEGIN
-- Get bucket URI and credential from FILE_MANAGER configuration
vBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
@@ -29,45 +34,55 @@ BEGIN
DBMS_OUTPUT.PUT_LINE('START TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
DBMS_OUTPUT.PUT_LINE('');
DBMS_OUTPUT.PUT_LINE('Deleting MRR_IND_CURRENT_ACCOUNT files registered by MARS-826 process...');
DBMS_OUTPUT.PUT_LINE(' Using A_SOURCE_FILE_RECEIVED with PROCESS_NAME = ''MARS-826''');
-- Delete files registered by MARS-826 process for MRR_IND_CURRENT_ACCOUNT tables
FOR rec IN (
SELECT SOURCE_FILE_NAME AS object_name
FROM CT_MRDS.A_SOURCE_FILE_RECEIVED
WHERE PROCESS_NAME = 'MARS-826'
AND (SOURCE_FILE_NAME LIKE '%CURRENT_ACCOUNTS%')
ORDER BY SOURCE_FILE_NAME
) LOOP
vFileCount := vFileCount + 1;
-- Initialize folder list for MRR_IND_CURR_ACC tables (OU_MRR schema)
vFolders := t_folder_list(
'ARCHIVE/LM/LM_CURRENT_ACCOUNTS_HEADER/',
'ARCHIVE/LM/LM_CURRENT_ACCOUNTS_ITEM/'
);
-- Process each folder
FOR i IN 1..vFolders.COUNT LOOP
DBMS_OUTPUT.PUT_LINE('Processing folder: ' || vFolders(i));
vFileCount := 0;
vDeletedCount := 0;
BEGIN
-- Delete the file
DBMS_CLOUD.DELETE_OBJECT(
-- List and delete all Parquet files in the folder
FOR rec IN (
SELECT object_name
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
credential_name => vCredentialName,
object_uri => vBucketUri || rec.object_name
);
location_uri => vBucketUri || vFolders(i)
))
WHERE object_name LIKE '%.parquet'
ORDER BY object_name
) LOOP
vFileCount := vFileCount + 1;
DBMS_OUTPUT.PUT_LINE(' [' || vFileCount || '] Deleted: ' || rec.object_name);
EXCEPTION
WHEN OTHERS THEN
IF SQLCODE = -20404 THEN
DBMS_OUTPUT.PUT_LINE(' Skipped (not found): ' || rec.object_name);
ELSE
BEGIN
-- Delete the Parquet file
DBMS_CLOUD.DELETE_OBJECT(
credential_name => vCredentialName,
object_uri => vBucketUri || vFolders(i) || rec.object_name
);
vDeletedCount := vDeletedCount + 1;
DBMS_OUTPUT.PUT_LINE(' [' || vDeletedCount || '] Deleted: ' || rec.object_name);
EXCEPTION
WHEN OTHERS THEN
DBMS_OUTPUT.PUT_LINE(' ERROR deleting ' || rec.object_name || ': ' || SQLERRM);
END IF;
END;
END;
END LOOP;
vTotalDeleted := vTotalDeleted + vDeletedCount;
DBMS_OUTPUT.PUT_LINE('Folder summary: Found ' || vFileCount || ' files, deleted ' || vDeletedCount || ' files');
DBMS_OUTPUT.PUT_LINE('');
END LOOP;
IF vFileCount = 0 THEN
DBMS_OUTPUT.PUT_LINE(' INFO: No MRR_IND_CURRENT_ACCOUNT files found to delete');
END IF;
DBMS_OUTPUT.PUT_LINE('');
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
DBMS_OUTPUT.PUT_LINE('ROLLBACK COMPLETE: Total files deleted: ' || vFileCount);
DBMS_OUTPUT.PUT_LINE('ROLLBACK COMPLETE: Total files deleted: ' || vTotalDeleted);
DBMS_OUTPUT.PUT_LINE('END TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
DBMS_OUTPUT.PUT_LINE('=====================================================================================');

View File

@@ -22,6 +22,11 @@ DECLARE
vBucketUri VARCHAR2(500);
vCredentialName VARCHAR2(100);
vFileCount NUMBER := 0;
vDeletedCount NUMBER := 0;
vTotalDeleted NUMBER := 0;
TYPE t_folder_list IS TABLE OF VARCHAR2(200);
vFolders t_folder_list;
BEGIN
-- Get bucket URI and credential from FILE_MANAGER configuration
vBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
@@ -29,45 +34,55 @@ BEGIN
DBMS_OUTPUT.PUT_LINE('START TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
DBMS_OUTPUT.PUT_LINE('');
DBMS_OUTPUT.PUT_LINE('Deleting FORECAST files registered by MARS-826 process...');
DBMS_OUTPUT.PUT_LINE(' Using A_SOURCE_FILE_RECEIVED with PROCESS_NAME = ''MARS-826''');
-- Delete files registered by MARS-826 process for FORECAST tables
FOR rec IN (
SELECT SOURCE_FILE_NAME AS object_name
FROM CT_MRDS.A_SOURCE_FILE_RECEIVED
WHERE PROCESS_NAME = 'MARS-826'
AND (SOURCE_FILE_NAME LIKE '%FORECAST%')
ORDER BY SOURCE_FILE_NAME
) LOOP
vFileCount := vFileCount + 1;
-- Initialize folder list for FORECAST tables
vFolders := t_folder_list(
'ARCHIVE/LM/LM_FORECAST_HEADER/',
'ARCHIVE/LM/LM_FORECAST_ITEM/'
);
-- Process each folder
FOR i IN 1..vFolders.COUNT LOOP
DBMS_OUTPUT.PUT_LINE('Processing folder: ' || vFolders(i));
vFileCount := 0;
vDeletedCount := 0;
BEGIN
-- Delete the file
DBMS_CLOUD.DELETE_OBJECT(
-- List and delete all Parquet files in the folder
FOR rec IN (
SELECT object_name
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
credential_name => vCredentialName,
object_uri => vBucketUri || rec.object_name
);
location_uri => vBucketUri || vFolders(i)
))
WHERE object_name LIKE '%.parquet'
ORDER BY object_name
) LOOP
vFileCount := vFileCount + 1;
DBMS_OUTPUT.PUT_LINE(' [' || vFileCount || '] Deleted: ' || rec.object_name);
EXCEPTION
WHEN OTHERS THEN
IF SQLCODE = -20404 THEN
DBMS_OUTPUT.PUT_LINE(' Skipped (not found): ' || rec.object_name);
ELSE
BEGIN
-- Delete the Parquet file
DBMS_CLOUD.DELETE_OBJECT(
credential_name => vCredentialName,
object_uri => vBucketUri || vFolders(i) || rec.object_name
);
vDeletedCount := vDeletedCount + 1;
DBMS_OUTPUT.PUT_LINE(' [' || vDeletedCount || '] Deleted: ' || rec.object_name);
EXCEPTION
WHEN OTHERS THEN
DBMS_OUTPUT.PUT_LINE(' ERROR deleting ' || rec.object_name || ': ' || SQLERRM);
END IF;
END;
END;
END LOOP;
vTotalDeleted := vTotalDeleted + vDeletedCount;
DBMS_OUTPUT.PUT_LINE('Folder summary: Found ' || vFileCount || ' files, deleted ' || vDeletedCount || ' files');
DBMS_OUTPUT.PUT_LINE('');
END LOOP;
IF vFileCount = 0 THEN
DBMS_OUTPUT.PUT_LINE(' INFO: No FORECAST files found to delete');
END IF;
DBMS_OUTPUT.PUT_LINE('');
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
DBMS_OUTPUT.PUT_LINE('ROLLBACK COMPLETE: Total files deleted: ' || vFileCount);
DBMS_OUTPUT.PUT_LINE('ROLLBACK COMPLETE: Total files deleted: ' || vTotalDeleted);
DBMS_OUTPUT.PUT_LINE('END TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
DBMS_OUTPUT.PUT_LINE('=====================================================================================');

View File

@@ -23,6 +23,11 @@ DECLARE
vBucketUri VARCHAR2(500);
vCredentialName VARCHAR2(100);
vFileCount NUMBER := 0;
vDeletedCount NUMBER := 0;
vTotalDeleted NUMBER := 0;
TYPE t_folder_list IS TABLE OF VARCHAR2(200);
vFolders t_folder_list;
BEGIN
-- Get bucket URI and credential from FILE_MANAGER configuration
vBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
@@ -30,45 +35,56 @@ BEGIN
DBMS_OUTPUT.PUT_LINE('START TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
DBMS_OUTPUT.PUT_LINE('');
DBMS_OUTPUT.PUT_LINE('Deleting QR_ADJ files registered by MARS-826 process...');
DBMS_OUTPUT.PUT_LINE(' Using A_SOURCE_FILE_RECEIVED with PROCESS_NAME = ''MARS-826''');
-- Delete files registered by MARS-826 process for QR_ADJ tables
FOR rec IN (
SELECT SOURCE_FILE_NAME AS object_name
FROM CT_MRDS.A_SOURCE_FILE_RECEIVED
WHERE PROCESS_NAME = 'MARS-826'
AND (SOURCE_FILE_NAME LIKE '%QRE_ADJUSTMENTS%')
ORDER BY SOURCE_FILE_NAME
) LOOP
vFileCount := vFileCount + 1;
-- Initialize folder list for QR_ADJ tables
vFolders := t_folder_list(
'ARCHIVE/LM/LM_QRE_ADJUSTMENTS_HEADER/',
'ARCHIVE/LM/LM_QRE_ADJUSTMENTS_ITEM/',
'ARCHIVE/LM/LM_QRE_ADJUSTMENTS_ITEM_HEADER/'
);
-- Process each folder
FOR i IN 1..vFolders.COUNT LOOP
DBMS_OUTPUT.PUT_LINE('Processing folder: ' || vFolders(i));
vFileCount := 0;
vDeletedCount := 0;
BEGIN
-- Delete the file
DBMS_CLOUD.DELETE_OBJECT(
-- List and delete all Parquet files in the folder
FOR rec IN (
SELECT object_name
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
credential_name => vCredentialName,
object_uri => vBucketUri || rec.object_name
);
location_uri => vBucketUri || vFolders(i)
))
WHERE object_name LIKE '%.parquet'
ORDER BY object_name
) LOOP
vFileCount := vFileCount + 1;
DBMS_OUTPUT.PUT_LINE(' [' || vFileCount || '] Deleted: ' || rec.object_name);
EXCEPTION
WHEN OTHERS THEN
IF SQLCODE = -20404 THEN
DBMS_OUTPUT.PUT_LINE(' Skipped (not found): ' || rec.object_name);
ELSE
BEGIN
-- Delete the Parquet file
DBMS_CLOUD.DELETE_OBJECT(
credential_name => vCredentialName,
object_uri => vBucketUri || vFolders(i) || rec.object_name
);
vDeletedCount := vDeletedCount + 1;
DBMS_OUTPUT.PUT_LINE(' [' || vDeletedCount || '] Deleted: ' || rec.object_name);
EXCEPTION
WHEN OTHERS THEN
DBMS_OUTPUT.PUT_LINE(' ERROR deleting ' || rec.object_name || ': ' || SQLERRM);
END IF;
END;
END;
END LOOP;
vTotalDeleted := vTotalDeleted + vDeletedCount;
DBMS_OUTPUT.PUT_LINE('Folder summary: Found ' || vFileCount || ' files, deleted ' || vDeletedCount || ' files');
DBMS_OUTPUT.PUT_LINE('');
END LOOP;
IF vFileCount = 0 THEN
DBMS_OUTPUT.PUT_LINE(' INFO: No QR_ADJ files found to delete');
END IF;
DBMS_OUTPUT.PUT_LINE('');
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
DBMS_OUTPUT.PUT_LINE('ROLLBACK COMPLETE: Total files deleted: ' || vFileCount);
DBMS_OUTPUT.PUT_LINE('ROLLBACK COMPLETE: Total files deleted: ' || vTotalDeleted);
DBMS_OUTPUT.PUT_LINE('END TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
DBMS_OUTPUT.PUT_LINE('=====================================================================================');

View File

@@ -22,6 +22,11 @@ DECLARE
vBucketUri VARCHAR2(500);
vCredentialName VARCHAR2(100);
vFileCount NUMBER := 0;
vDeletedCount NUMBER := 0;
vTotalDeleted NUMBER := 0;
TYPE t_folder_list IS TABLE OF VARCHAR2(200);
vFolders t_folder_list;
BEGIN
-- Get bucket URI and credential from FILE_MANAGER configuration
vBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
@@ -29,45 +34,55 @@ BEGIN
DBMS_OUTPUT.PUT_LINE('START TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
DBMS_OUTPUT.PUT_LINE('');
DBMS_OUTPUT.PUT_LINE('Deleting TTS files registered by MARS-826 process...');
DBMS_OUTPUT.PUT_LINE(' Using A_SOURCE_FILE_RECEIVED with PROCESS_NAME = ''MARS-826''');
-- Delete files registered by MARS-826 process for TTS tables
FOR rec IN (
SELECT SOURCE_FILE_NAME AS object_name
FROM CT_MRDS.A_SOURCE_FILE_RECEIVED
WHERE PROCESS_NAME = 'MARS-826'
AND (SOURCE_FILE_NAME LIKE '%TTS%')
ORDER BY SOURCE_FILE_NAME
) LOOP
vFileCount := vFileCount + 1;
-- Initialize folder list for TTS tables
vFolders := t_folder_list(
'ARCHIVE/LM/LM_TTS_HEADER/',
'ARCHIVE/LM/LM_TTS_ITEM/'
);
-- Process each folder
FOR i IN 1..vFolders.COUNT LOOP
DBMS_OUTPUT.PUT_LINE('Processing folder: ' || vFolders(i));
vFileCount := 0;
vDeletedCount := 0;
BEGIN
-- Delete the file
DBMS_CLOUD.DELETE_OBJECT(
-- List and delete all Parquet files in the folder
FOR rec IN (
SELECT object_name
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
credential_name => vCredentialName,
object_uri => vBucketUri || rec.object_name
);
location_uri => vBucketUri || vFolders(i)
))
WHERE object_name LIKE '%.parquet'
ORDER BY object_name
) LOOP
vFileCount := vFileCount + 1;
DBMS_OUTPUT.PUT_LINE(' [' || vFileCount || '] Deleted: ' || rec.object_name);
EXCEPTION
WHEN OTHERS THEN
IF SQLCODE = -20404 THEN
DBMS_OUTPUT.PUT_LINE(' Skipped (not found): ' || rec.object_name);
ELSE
BEGIN
-- Delete the Parquet file
DBMS_CLOUD.DELETE_OBJECT(
credential_name => vCredentialName,
object_uri => vBucketUri || vFolders(i) || rec.object_name
);
vDeletedCount := vDeletedCount + 1;
DBMS_OUTPUT.PUT_LINE(' [' || vDeletedCount || '] Deleted: ' || rec.object_name);
EXCEPTION
WHEN OTHERS THEN
DBMS_OUTPUT.PUT_LINE(' ERROR deleting ' || rec.object_name || ': ' || SQLERRM);
END IF;
END;
END;
END LOOP;
vTotalDeleted := vTotalDeleted + vDeletedCount;
DBMS_OUTPUT.PUT_LINE('Folder summary: Found ' || vFileCount || ' files, deleted ' || vDeletedCount || ' files');
DBMS_OUTPUT.PUT_LINE('');
END LOOP;
IF vFileCount = 0 THEN
DBMS_OUTPUT.PUT_LINE(' INFO: No TTS files found to delete');
END IF;
DBMS_OUTPUT.PUT_LINE('');
DBMS_OUTPUT.PUT_LINE('=====================================================================================');
DBMS_OUTPUT.PUT_LINE('ROLLBACK COMPLETE: Total files deleted: ' || vFileCount);
DBMS_OUTPUT.PUT_LINE('ROLLBACK COMPLETE: Total files deleted: ' || vTotalDeleted);
DBMS_OUTPUT.PUT_LINE('END TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
DBMS_OUTPUT.PUT_LINE('=====================================================================================');

View File

@@ -57,7 +57,6 @@ PROMPT
PROMPT ============================================================================
-- Execute rollback scripts
@@90_MARS_826_rollback_file_registrations.sql
@@91_MARS_826_rollback_ADHOC_ADJ_tables.sql
@@92_MARS_826_rollback_BALANCESHEET_tables.sql
@@93_MARS_826_rollback_CSM_ADJ_tables.sql

View File

@@ -785,15 +785,14 @@ AS
-- Register exported file to A_SOURCE_FILE_RECEIVED if requested
IF pRegisterExport THEN
DECLARE
vChecksum VARCHAR2(128);
vCreated TIMESTAMP WITH TIME ZONE;
vBytes NUMBER;
vActualFileName VARCHAR2(1000); -- Actual filename with Oracle suffix
vSanitizedFileName VARCHAR2(1000);
vFileName VARCHAR2(1000);
vRetryCount NUMBER := 0;
vMaxRetries NUMBER := 1; -- One retry after initial attempt
vRetryDelay NUMBER := 2; -- 2 seconds delay
vFilesFound NUMBER := 0;
vTotalBytes NUMBER := 0;
BEGIN
-- Extract filename from URI (after last '/')
vFileName := SUBSTR(vUri, INSTR(vUri, '/', -1) + 1);
@@ -805,24 +804,70 @@ AS
-- Example: tablename.csv becomes tablename_1_20260211T102621591769Z.csv
vSanitizedFileName := REGEXP_REPLACE(vSanitizedFileName, '\.csv$', '', 1, 0, 'i');
-- Try to get file metadata with retry logic
-- Try to get ALL exported files with retry logic
-- Oracle DBMS_CLOUD.EXPORT_DATA can create MULTIPLE files due to:
-- 1. maxfilesize parameter (splits files larger than limit)
-- 2. Automatic parallel processing (especially on large production instances)
-- We must register ALL files, not just the first one
<<metadata_retry_loop>>
LOOP
BEGIN
SELECT object_name, checksum, created, bytes
INTO vActualFileName, vChecksum, vCreated, vBytes
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
credential_name => pCredentialName,
location_uri => vBucketUri
))
WHERE object_name LIKE CASE WHEN pFolderName IS NOT NULL THEN pFolderName || '/' ELSE '' END || vSanitizedFileName || '%'
ORDER BY created DESC, bytes DESC
FETCH FIRST 1 ROW ONLY;
-- Register ALL files matching the pattern (cursor loop)
FOR rec IN (
SELECT object_name, checksum, created, bytes
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
credential_name => pCredentialName,
location_uri => vBucketUri
))
WHERE object_name LIKE CASE WHEN pFolderName IS NOT NULL THEN pFolderName || '/' ELSE '' END || vSanitizedFileName || '%'
ORDER BY created DESC, bytes DESC
) LOOP
-- Extract filename only from full path (remove bucket folder prefix)
vActualFileName := SUBSTR(rec.object_name, INSTR(rec.object_name, '/', -1) + 1);
-- Create A_SOURCE_FILE_RECEIVED record for EACH exported file
vSourceFileReceivedKey := CT_MRDS.A_SOURCE_FILE_RECEIVED_KEY_SEQ.NEXTVAL;
INSERT INTO CT_MRDS.A_SOURCE_FILE_RECEIVED (
A_SOURCE_FILE_RECEIVED_KEY,
A_SOURCE_FILE_CONFIG_KEY,
SOURCE_FILE_NAME,
CHECKSUM,
CREATED,
BYTES,
RECEPTION_DATE,
PROCESSING_STATUS,
PARTITION_YEAR,
PARTITION_MONTH,
ARCH_PATH,
PROCESS_NAME
) VALUES (
vSourceFileReceivedKey,
NVL(vConfigKey, -1), -- Use config key if found, otherwise -1
vActualFileName, -- Use actual filename with Oracle suffix
rec.checksum,
rec.created,
rec.bytes,
SYSDATE,
'INGESTED',
NULL, -- PARTITION_YEAR not used for single-file exports
NULL, -- PARTITION_MONTH not used for single-file exports
NULL, -- ARCH_PATH not used for single-file exports
pProcessName -- Process name from parameter
);
vFilesFound := vFilesFound + 1;
vTotalBytes := vTotalBytes + rec.bytes;
ENV_MANAGER.LOG_PROCESS_EVENT('Registered file ' || vFilesFound || ': FileReceivedKey=' || vSourceFileReceivedKey || ', File=' || vActualFileName || ', Size=' || rec.bytes || ' bytes', 'INFO', vParameters);
END LOOP;
-- Extract filename only from full path (remove bucket folder prefix)
vActualFileName := SUBSTR(vActualFileName, INSTR(vActualFileName, '/', -1) + 1);
-- Check if any files were found
IF vFilesFound = 0 THEN
RAISE NO_DATA_FOUND;
END IF;
-- Success - exit retry loop
ENV_MANAGER.LOG_PROCESS_EVENT('Total registered: ' || vFilesFound || ' file(s), Total size: ' || vTotalBytes || ' bytes (' || ROUND(vTotalBytes/1048576, 2) || ' MB)', 'INFO', vParameters);
EXIT metadata_retry_loop;
EXCEPTION
@@ -831,7 +876,7 @@ AS
IF vRetryCount <= vMaxRetries THEN
-- Log retry attempt
ENV_MANAGER.LOG_PROCESS_EVENT('File not found in bucket (attempt ' || vRetryCount || '/' || (vMaxRetries + 1) || '), retrying after ' || vRetryDelay || ' seconds: ' || vFileName, 'DEBUG', vParameters);
ENV_MANAGER.LOG_PROCESS_EVENT('File(s) not found in bucket (attempt ' || vRetryCount || '/' || (vMaxRetries + 1) || '), retrying after ' || vRetryDelay || ' seconds: ' || vFileName, 'DEBUG', vParameters);
-- Wait before retry using DBMS_SESSION.SLEEP (alternative to DBMS_LOCK)
DBMS_SESSION.SLEEP(vRetryDelay);
@@ -841,38 +886,6 @@ AS
END IF;
END;
END LOOP metadata_retry_loop;
-- Create A_SOURCE_FILE_RECEIVED record for this export with metadata
vSourceFileReceivedKey := CT_MRDS.A_SOURCE_FILE_RECEIVED_KEY_SEQ.NEXTVAL;
INSERT INTO CT_MRDS.A_SOURCE_FILE_RECEIVED (
A_SOURCE_FILE_RECEIVED_KEY,
A_SOURCE_FILE_CONFIG_KEY,
SOURCE_FILE_NAME,
CHECKSUM,
CREATED,
BYTES,
RECEPTION_DATE,
PROCESSING_STATUS,
PARTITION_YEAR,
PARTITION_MONTH,
ARCH_PATH,
PROCESS_NAME
) VALUES (
vSourceFileReceivedKey,
NVL(vConfigKey, -1), -- Use config key if found, otherwise -1
vActualFileName, -- Use actual filename with Oracle suffix
vChecksum,
vCreated,
vBytes,
SYSDATE,
'INGESTED',
NULL, -- PARTITION_YEAR not used for single-file exports
NULL, -- PARTITION_MONTH not used for single-file exports
NULL, -- ARCH_PATH not used for single-file exports
pProcessName -- Process name from parameter
);
ENV_MANAGER.LOG_PROCESS_EVENT('Registered file: FileReceivedKey=' || vSourceFileReceivedKey || ', File=' || vActualFileName || ', Size=' || vBytes || ' bytes', 'INFO', vParameters);
EXCEPTION
WHEN NO_DATA_FOUND THEN
-- File not found after retries - log warning and continue without metadata

View File

@@ -9,19 +9,17 @@ AS
**/
-- Package Version Information
PACKAGE_VERSION CONSTANT VARCHAR2(10) := '2.8.1';
PACKAGE_BUILD_DATE CONSTANT VARCHAR2(20) := '2026-02-12 10:30:00';
PACKAGE_VERSION CONSTANT VARCHAR2(10) := '2.10.0';
PACKAGE_BUILD_DATE CONSTANT VARCHAR2(20) := '2026-02-13 16:30:00';
PACKAGE_AUTHOR CONSTANT VARCHAR2(100) := 'Grzegorz Michalski';
-- Version History (last 3-5 changes)
VERSION_HISTORY CONSTANT VARCHAR2(4000) :=
'v2.10.0 (2026-02-13): CRITICAL FIX - Register ALL files created by DBMS_CLOUD.EXPORT_DATA (multi-file support due to Oracle parallel processing on large instances). Prevents orphaned files in rollback.' || CHR(10) ||
'v2.9.0 (2026-02-13): Added pProcessName parameter to EXPORT_TABLE_DATA and EXPORT_TABLE_DATA_TO_CSV_BY_DATE procedures for process tracking in A_SOURCE_FILE_RECEIVED table.' || CHR(10) ||
'v2.8.1 (2026-02-12): FIX query in EXPORT_TABLE_DATA - removed A_LOAD_HISTORY join to ensure single file output (simple SELECT).' || CHR(10) ||
'v2.8.0 (2026-02-12): MAJOR REFACTOR - EXPORT_TABLE_DATA now exports to single CSV file instead of partitioning by key values. Added pFileName parameter.' || CHR(10) ||
'v2.7.5 (2026-02-11): Added pRegisterExport parameter to EXPORT_TABLE_DATA procedure. When TRUE, registers each exported CSV file in A_SOURCE_FILE_RECEIVED.' || CHR(10) ||
'v2.7.4 (2026-02-11): ACTUAL FILENAME STORAGE - Store real filename with Oracle suffix in SOURCE_FILE_NAME instead of theoretical filename.' || CHR(10) ||
'v2.7.3 (2026-02-11): FIX LIKE pattern for DBMS_CLOUD.LIST_OBJECTS - Removed .csv extension from filename before pattern matching.' || CHR(10) ||
'v2.7.2 (2026-02-11): FIX pRegisterExport in EXPORT_TABLE_DATA_TO_CSV_BY_DATE - Added missing pRegisterExport parameter to EXPORT_SINGLE_PARTITION call.' || CHR(10) ||
'v2.7.1 (2026-02-11): AUTO-LOOKUP A_SOURCE_FILE_CONFIG_KEY - Parse pFolderName to automatically find config key from A_SOURCE_FILE_CONFIG.' || CHR(10);
'v2.7.5 (2026-02-11): Added pRegisterExport parameter to EXPORT_TABLE_DATA procedure. When TRUE, registers each exported CSV file in A_SOURCE_FILE_RECEIVED.' || CHR(10);
cgBL CONSTANT VARCHAR2(2) := CHR(13)||CHR(10);
vgMsgTmp VARCHAR2(32000);

View File

@@ -0,0 +1,239 @@
create or replace PACKAGE CT_MRDS.DATA_EXPORTER
AUTHID CURRENT_USER
AS
/**
* Data Export Package: Provides comprehensive data export capabilities to various formats (CSV, Parquet)
* with support for cloud storage integration via Oracle Cloud Infrastructure (OCI).
* The structure of comment is used by GET_PACKAGE_DOCUMENTATION function
* which returns documentation text for confluence page (to Copy-Paste it).
**/
-- Package Version Information
PACKAGE_VERSION CONSTANT VARCHAR2(10) := '2.9.0';
PACKAGE_BUILD_DATE CONSTANT VARCHAR2(20) := '2026-02-13 14:00:00';
PACKAGE_AUTHOR CONSTANT VARCHAR2(100) := 'Grzegorz Michalski';
-- Version History (last 3-5 changes)
VERSION_HISTORY CONSTANT VARCHAR2(4000) :=
'v2.9.0 (2026-02-13): Added pProcessName parameter to EXPORT_TABLE_DATA and EXPORT_TABLE_DATA_TO_CSV_BY_DATE procedures for process tracking in A_SOURCE_FILE_RECEIVED table.' || CHR(10) ||
'v2.8.1 (2026-02-12): FIX query in EXPORT_TABLE_DATA - removed A_LOAD_HISTORY join to ensure single file output (simple SELECT).' || CHR(10) ||
'v2.8.0 (2026-02-12): MAJOR REFACTOR - EXPORT_TABLE_DATA now exports to single CSV file instead of partitioning by key values. Added pFileName parameter.' || CHR(10) ||
'v2.7.5 (2026-02-11): Added pRegisterExport parameter to EXPORT_TABLE_DATA procedure. When TRUE, registers each exported CSV file in A_SOURCE_FILE_RECEIVED.' || CHR(10) ||
'v2.7.4 (2026-02-11): ACTUAL FILENAME STORAGE - Store real filename with Oracle suffix in SOURCE_FILE_NAME instead of theoretical filename.' || CHR(10);
-- CRLF sequence used as a line break when composing multi-line messages/documentation
cgBL CONSTANT VARCHAR2(2) := CHR(13)||CHR(10);
-- Scratch buffer for message assembly (used by the package body; usage not visible in this spec)
vgMsgTmp VARCHAR2(32000);
---------------------------------------------------------------------------------------------------------------------------
-- TYPE DEFINITIONS FOR PARTITION HANDLING
---------------------------------------------------------------------------------------------------------------------------
/**
* Record type for year/month partition information
**/
TYPE partition_rec IS RECORD (
year VARCHAR2(4),
month VARCHAR2(2)
);
/**
* Table type for collection of partition records
**/
TYPE partition_tab IS TABLE OF partition_rec;
---------------------------------------------------------------------------------------------------------------------------
-- INTERNAL PARALLEL PROCESSING CALLBACK
---------------------------------------------------------------------------------------------------------------------------
/**
* @name EXPORT_PARTITION_PARALLEL
* @desc Internal callback procedure for DBMS_PARALLEL_EXECUTE.
* Processes single partition (year/month) chunk in parallel task.
* Called by DBMS_PARALLEL_EXECUTE framework for each chunk.
* This procedure is PUBLIC because DBMS_PARALLEL_EXECUTE requires it,
* but should NOT be called directly by external code.
* @param pStartId - Chunk start ID (CHUNK_ID from A_PARALLEL_EXPORT_CHUNKS table)
* @param pEndId - Chunk end ID (same as pStartId for single-row chunks)
**/
PROCEDURE EXPORT_PARTITION_PARALLEL (
pStartId IN NUMBER,
pEndId IN NUMBER
);
---------------------------------------------------------------------------------------------------------------------------
-- MAIN EXPORT PROCEDURES
---------------------------------------------------------------------------------------------------------------------------
/**
* @name EXPORT_TABLE_DATA
* @desc Wrapper procedure for DBMS_CLOUD.EXPORT_DATA.
* Exports data into single CSV file on OCI infrastructure.
* pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
* Supports template table for column order and per-column date formatting.
* When pRegisterExport=TRUE, successfully exported file is registered in:
* - CT_MRDS.A_SOURCE_FILE_RECEIVED (tracks file location, size, checksum, and metadata)
* @param pFileName - Optional filename (e.g., 'export.csv'). NULL = auto-generate from table name
* @param pTemplateTableName - Optional template table (SCHEMA.TABLE or TABLE) for:
* - Column order control (template defines CSV structure)
* - Per-column date formatting via FILE_MANAGER.GET_DATE_FORMAT
* - NULL = use source table columns in natural order
* @param pMaxFileSize - Maximum file size in bytes (default 104857600 = 100MB, min 10MB, max 1GB)
* @param pRegisterExport - When TRUE, registers exported CSV file in A_SOURCE_FILE_RECEIVED table
* @param pProcessName - Process name stored in PROCESS_NAME column (default 'DATA_EXPORTER')
* @example
* begin
* DATA_EXPORTER.EXPORT_TABLE_DATA(
* pSchemaName => 'CT_MRDS',
* pTableName => 'MY_TABLE',
* pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
* pBucketArea => 'DATA',
* pFolderName => 'csv_exports',
* pFileName => 'my_export.csv', -- Optional
* pTemplateTableName => 'CT_ET_TEMPLATES.MY_TEMPLATE', -- Optional
* pMaxFileSize => 104857600, -- Optional, default 100MB
* pRegisterExport => TRUE -- Optional, default FALSE
* );
* end;
**/
PROCEDURE EXPORT_TABLE_DATA (
pSchemaName IN VARCHAR2,
pTableName IN VARCHAR2,
pKeyColumnName IN VARCHAR2,
pBucketArea IN VARCHAR2,
pFolderName IN VARCHAR2,
pFileName IN VARCHAR2 default NULL,
pTemplateTableName IN VARCHAR2 default NULL,
pMaxFileSize IN NUMBER default 104857600,
pRegisterExport IN BOOLEAN default FALSE,
pProcessName IN VARCHAR2 default 'DATA_EXPORTER',
pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
);
/**
* @name EXPORT_TABLE_DATA_BY_DATE
* @desc Wrapper procedure for DBMS_CLOUD.EXPORT_DATA.
* Exports data into PARQUET files on OCI infrastructure.
* Each YEAR_MONTH pair goes to separate file (implicit partitioning).
* Allows specifying custom column list or uses T.* if pColumnList is NULL.
* Validates that all columns in pColumnList exist in the target table.
* Automatically adds 'T.' prefix to column names in pColumnList.
* Supports parallel partition processing via pParallelDegree parameter (default 1, range 1-16).
* pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
* @example
* begin
* DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
* pSchemaName => 'CT_MRDS',
* pTableName => 'MY_TABLE',
* pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
* pBucketArea => 'DATA',
* pFolderName => 'parquet_exports',
* pColumnList => 'COLUMN1, COLUMN2, COLUMN3', -- Optional
* pMinDate => DATE '2024-01-01',
* pMaxDate => SYSDATE,
* pParallelDegree => 8 -- Optional, default 1, range 1-16
* );
* end;
**/
PROCEDURE EXPORT_TABLE_DATA_BY_DATE (
pSchemaName IN VARCHAR2,
pTableName IN VARCHAR2,
pKeyColumnName IN VARCHAR2,
pBucketArea IN VARCHAR2,
pFolderName IN VARCHAR2,
pColumnList IN VARCHAR2 default NULL,
pMinDate IN DATE default DATE '1900-01-01',
pMaxDate IN DATE default SYSDATE,
pParallelDegree IN NUMBER default 1,
pTemplateTableName IN VARCHAR2 default NULL,
pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
);
/**
* @name EXPORT_TABLE_DATA_TO_CSV_BY_DATE
* @desc Exports data to separate CSV files partitioned by year and month.
* Creates one CSV file for each year/month combination found in the data.
* Uses the same date filtering mechanism with CT_ODS.A_LOAD_HISTORY as EXPORT_TABLE_DATA_BY_DATE,
* but exports to CSV format instead of Parquet.
* Supports parallel partition processing via pParallelDegree parameter (1-16).
* File naming pattern: {pFileName}_YYYYMM.csv or {TABLENAME}_YYYYMM.csv (if pFileName is NULL)
* When pRegisterExport=TRUE, successfully exported files are registered in:
* - CT_MRDS.A_SOURCE_FILE_RECEIVED (tracks file location, size, checksum, and metadata)
* @param pProcessName - Process name stored in PROCESS_NAME column (default 'DATA_EXPORTER')
* @example
* begin
* -- With custom filename
* DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
* pSchemaName => 'CT_MRDS',
* pTableName => 'MY_TABLE',
* pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
* pBucketArea => 'DATA',
* pFolderName => 'exports',
* pFileName => 'my_export.csv',
* pMinDate => DATE '2024-01-01',
* pMaxDate => SYSDATE,
* pParallelDegree => 8, -- Optional, default 1, range 1-16
* pRegisterExport => TRUE -- Optional, default FALSE, registers to A_SOURCE_FILE_RECEIVED
* );
*
* -- With auto-generated filename (based on table name only)
* DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
* pSchemaName => 'OU_TOP',
* pTableName => 'AGGREGATED_ALLOTMENT',
* pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
* pBucketArea => 'ARCHIVE',
* pFolderName => 'exports',
* pMinDate => DATE '2025-09-01',
* pMaxDate => DATE '2025-09-17',
* pRegisterExport => TRUE -- Registers each export to A_SOURCE_FILE_RECEIVED table
* );
* -- This will create files like: AGGREGATED_ALLOTMENT_202509.csv, etc.
* pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
* end;
**/
PROCEDURE EXPORT_TABLE_DATA_TO_CSV_BY_DATE (
pSchemaName IN VARCHAR2,
pTableName IN VARCHAR2,
pKeyColumnName IN VARCHAR2,
pBucketArea IN VARCHAR2,
pFolderName IN VARCHAR2,
pFileName IN VARCHAR2 DEFAULT NULL,
pColumnList IN VARCHAR2 default NULL,
pMinDate IN DATE default DATE '1900-01-01',
pMaxDate IN DATE default SYSDATE,
pParallelDegree IN NUMBER default 1,
pTemplateTableName IN VARCHAR2 default NULL,
pMaxFileSize IN NUMBER default 104857600,
pRegisterExport IN BOOLEAN default FALSE,
pProcessName IN VARCHAR2 default 'DATA_EXPORTER',
pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
);
---------------------------------------------------------------------------------------------------------------------------
-- VERSION MANAGEMENT FUNCTIONS
---------------------------------------------------------------------------------------------------------------------------
/**
* Returns the current package version number
* return: Version string in format X.Y.Z (e.g., '2.1.0')
**/
FUNCTION GET_VERSION RETURN VARCHAR2;
/**
* Returns comprehensive build information including version, date, and author
* return: Formatted string with complete build details
**/
FUNCTION GET_BUILD_INFO RETURN VARCHAR2;
/**
* Returns the version history with recent changes
* return: Multi-line string with version history
**/
FUNCTION GET_VERSION_HISTORY RETURN VARCHAR2;
END;
/

View File

@@ -0,0 +1,625 @@
create or replace PACKAGE CT_MRDS.ENV_MANAGER
AUTHID CURRENT_USER
AS
/**
* General comment for package: Please put comments for functions and procedures as shown in below example.
* It is a standard.
* The structure of comment is used by GET_PACKAGE_DOCUMENTATION function
* which returns documentation text for confluence page (to Copy-Paste it).
**/
-- Example comment:
/**
* @name EX_PROCEDURE_NAME
* @desc Procedure description
* @example select ENV_MANAGER.EX_PROCEDURE_NAME(pParameter => 129) from dual;
* @ex_rslt Example Result
**/
-- Package Version Information (Semantic Versioning: MAJOR.MINOR.PATCH)
PACKAGE_VERSION CONSTANT VARCHAR2(10) := '3.2.0';
PACKAGE_BUILD_DATE CONSTANT VARCHAR2(20) := '2025-12-20 10:00:00';
PACKAGE_AUTHOR CONSTANT VARCHAR2(100) := 'Grzegorz Michalski';
-- Version History (Latest changes first)
VERSION_HISTORY CONSTANT VARCHAR2(4000) :=
'3.2.0 (2025-12-20): Added error codes for parallel execution support (CODE_INVALID_PARALLEL_DEGREE -20110, CODE_PARALLEL_EXECUTION_FAILED -20111)' || CHR(13)||CHR(10) ||
'3.1.0 (2025-10-22): Added package hash tracking and automatic change detection system (SHA256 hashing)' || CHR(13)||CHR(10) ||
'3.0.0 (2025-10-22): Added package versioning system with centralized version management functions' || CHR(13)||CHR(10) ||
'2.1.0 (2025-10-15): Added ANALYZE_VALIDATION_ERRORS function for comprehensive CSV validation analysis' || CHR(13)||CHR(10) ||
'2.0.0 (2025-10-01): Added LOG_PROCESS_ERROR procedure with enhanced error diagnostics and stack traces' || CHR(13)||CHR(10) ||
'1.5.0 (2025-09-20): Added console logging support with gvConsoleLoggingEnabled configuration' || CHR(13)||CHR(10) ||
'1.0.0 (2025-09-01): Initial release with error management and configuration system';
TYPE Error_Record IS RECORD (
code PLS_INTEGER,
message VARCHAR2(4000)
);
TYPE tErrorList IS TABLE OF Error_Record INDEX BY PLS_INTEGER;
Errors tErrorList;
guid VARCHAR2(32);
gvEnv VARCHAR2(200);
gvUsername VARCHAR2(128);
gvOsuser VARCHAR2(128);
gvMachine VARCHAR2(64);
gvModule VARCHAR2(64);
gvNameSpace VARCHAR2(200);
gvRegion VARCHAR2(200);
gvDataBucketName VARCHAR2(200);
gvInboxBucketName VARCHAR2(200);
gvArchiveBucketName VARCHAR2(200);
gvDataBucketUri VARCHAR2(200);
gvInboxBucketUri VARCHAR2(200);
gvArchiveBucketUri VARCHAR2(200);
gvCredentialName VARCHAR2(200);
-- Overwritten by variable "LoggingEnabled" in A_FILE_MANAGER_CONFIG.CONFIG_VARIABLE table
gvLoggingEnabled VARCHAR2(3) := 'ON'; -- 'ON' or 'OFF'
-- Overwritten by variable "MinLogLevel" in A_FILE_MANAGER_CONFIG.CONFIG_VARIABLE table
-- Possible values: DEBUG ,INFO ,WARNING ,ERROR
gvMinLogLevel VARCHAR2(10) := 'DEBUG';
-- Overwritten by variable "DefaultDateFormat" in A_FILE_MANAGER_CONFIG.CONFIG_VARIABLE table
gvDefaultDateFormat VARCHAR2(200) := 'DD/MM/YYYY HH24:MI:SS';
-- Overwritten by variable "ConsoleLoggingEnabled" in A_FILE_MANAGER_CONFIG.CONFIG_VARIABLE table
gvConsoleLoggingEnabled VARCHAR2(3) := 'ON'; -- 'ON' or 'OFF'
cgBL CONSTANT VARCHAR2(2) := CHR(13)||CHR(10);
vgSourceFileConfigKey PLS_INTEGER;
vgMsgTmp VARCHAR2(32000);
--Exceptions
ERR_EMPTY_FILEURI_AND_RECKEY EXCEPTION;
CODE_EMPTY_FILEURI_AND_RECKEY CONSTANT PLS_INTEGER := -20001;
MSG_EMPTY_FILEURI_AND_RECKEY VARCHAR2(4000) := 'Either pFileUri or pSourceFileReceivedKey must be not null';
PRAGMA EXCEPTION_INIT( ERR_EMPTY_FILEURI_AND_RECKEY
,CODE_EMPTY_FILEURI_AND_RECKEY);
ERR_NO_CONFIG_MATCH_FOR_FILEURI EXCEPTION;
CODE_NO_CONFIG_MATCH_FOR_FILEURI CONSTANT PLS_INTEGER := -20002;
MSG_NO_CONFIG_MATCH_FOR_FILEURI VARCHAR2(4000) := 'No match for source file in A_SOURCE_FILE_CONFIG table'
||cgBL||' The file provided in parameter: pFileUri does not have '
||cgBL||' coresponding configuration in A_SOURCE_FILE_CONFIG table';
PRAGMA EXCEPTION_INIT( ERR_NO_CONFIG_MATCH_FOR_FILEURI
,CODE_NO_CONFIG_MATCH_FOR_FILEURI);
ERR_MULTIPLE_MATCH_FOR_SRCFILE EXCEPTION;
CODE_MULTIPLE_MATCH_FOR_SRCFILE CONSTANT PLS_INTEGER := -20003;
MSG_MULTIPLE_MATCH_FOR_SRCFILE VARCHAR2(4000) := 'Multiple match for source file in A_SOURCE_FILE_CONFIG table';
PRAGMA EXCEPTION_INIT( ERR_MULTIPLE_MATCH_FOR_SRCFILE
,CODE_MULTIPLE_MATCH_FOR_SRCFILE);
ERR_MISSING_COLUMN_DATE_FORMAT EXCEPTION;
CODE_MISSING_COLUMN_DATE_FORMAT CONSTANT PLS_INTEGER := -20004;
MSG_MISSING_COLUMN_DATE_FORMAT VARCHAR2(4000) := 'Missing entry in config table: A_COLUMN_DATE_FORMAT primary key(TEMPLATE_TABLE_NAME, COLUMN_NAME)'
||cgBL||' Remember: each column which data_type IN (''DATE'', ''TIMESTAMP'')'
||cgBL||' should have DateFormat specified in A_COLUMN_DATE_FORMAT table '
||cgBL||' for example: ''YYYY-MM-DD''';
PRAGMA EXCEPTION_INIT( ERR_MISSING_COLUMN_DATE_FORMAT
,CODE_MISSING_COLUMN_DATE_FORMAT);
ERR_MULTIPLE_COLUMN_DATE_FORMAT EXCEPTION;
CODE_MULTIPLE_COLUMN_DATE_FORMAT CONSTANT PLS_INTEGER := -20005;
MSG_MULTIPLE_COLUMN_DATE_FORMAT VARCHAR2(4000) := 'Multiple records for date format in A_COLUMN_DATE_FORMAT table'
||cgBL||' There should be only one format specified for each DAT/TIMESTAMP column';
PRAGMA EXCEPTION_INIT( ERR_MULTIPLE_COLUMN_DATE_FORMAT
,CODE_MULTIPLE_COLUMN_DATE_FORMAT);
ERR_DIDNT_GET_LOAD_OPERATION_ID EXCEPTION;
CODE_DIDNT_GET_LOAD_OPERATION_ID CONSTANT PLS_INTEGER := -20006;
MSG_DIDNT_GET_LOAD_OPERATION_ID VARCHAR2(4000) := 'Didnt get load operation id from external table validation';
PRAGMA EXCEPTION_INIT( ERR_DIDNT_GET_LOAD_OPERATION_ID
,CODE_DIDNT_GET_LOAD_OPERATION_ID);
ERR_NO_CONFIG_FOR_RECEIVED_FILE EXCEPTION;
CODE_NO_CONFIG_FOR_RECEIVED_FILE CONSTANT PLS_INTEGER := -20007;
MSG_NO_CONFIG_FOR_RECEIVED_FILE VARCHAR2(4000) := 'No match for received source file in A_SOURCE_FILE_CONFIG '
||cgBL||' or missing data in A_SOURCE_FILE_RECEIVED table for provided pSourceFileReceivedKey parameter';
PRAGMA EXCEPTION_INIT( ERR_NO_CONFIG_FOR_RECEIVED_FILE
,CODE_NO_CONFIG_FOR_RECEIVED_FILE);
ERR_MULTI_CONFIG_FOR_RECEIVED_FILE EXCEPTION;
CODE_MULTI_CONFIG_FOR_RECEIVED_FILE CONSTANT PLS_INTEGER := -20008;
MSG_MULTI_CONFIG_FOR_RECEIVED_FILE VARCHAR2(4000) := 'Multiple matchs for received source file in A_SOURCE_FILE_CONFIG';
PRAGMA EXCEPTION_INIT( ERR_MULTI_CONFIG_FOR_RECEIVED_FILE
,CODE_MULTI_CONFIG_FOR_RECEIVED_FILE);
ERR_FILE_NOT_FOUND_ON_CLOUD EXCEPTION;
CODE_FILE_NOT_FOUND_ON_CLOUD CONSTANT PLS_INTEGER := -20009;
MSG_FILE_NOT_FOUND_ON_CLOUD VARCHAR2(4000) := 'File not found on the cloud';
PRAGMA EXCEPTION_INIT( ERR_FILE_NOT_FOUND_ON_CLOUD
,CODE_FILE_NOT_FOUND_ON_CLOUD);
ERR_FILE_VALIDATION_FAILED EXCEPTION;
CODE_FILE_VALIDATION_FAILED CONSTANT PLS_INTEGER := -20010;
MSG_FILE_VALIDATION_FAILED VARCHAR2(4000) := 'File validation failed';
PRAGMA EXCEPTION_INIT( ERR_FILE_VALIDATION_FAILED
,CODE_FILE_VALIDATION_FAILED);
ERR_EXCESS_COLUMNS_DETECTED EXCEPTION;
CODE_EXCESS_COLUMNS_DETECTED CONSTANT PLS_INTEGER := -20011;
MSG_EXCESS_COLUMNS_DETECTED VARCHAR2(4000) := 'CSV file contains more columns than template allows';
PRAGMA EXCEPTION_INIT( ERR_EXCESS_COLUMNS_DETECTED
,CODE_EXCESS_COLUMNS_DETECTED);
ERR_NO_CONFIG_MATCH EXCEPTION;
CODE_NO_CONFIG_MATCH CONSTANT PLS_INTEGER := -20012;
MSG_NO_CONFIG_MATCH VARCHAR2(4000) := 'No match for specified parameters in A_SOURCE_FILE_CONFIG table';
PRAGMA EXCEPTION_INIT( ERR_NO_CONFIG_MATCH
,CODE_NO_CONFIG_MATCH);
ERR_UNKNOWN_PREFIX EXCEPTION;
CODE_UNKNOWN_PREFIX CONSTANT PLS_INTEGER := -20013;
MSG_UNKNOWN_PREFIX VARCHAR2(4000) := 'Unknown prefix';
PRAGMA EXCEPTION_INIT( ERR_UNKNOWN_PREFIX
,CODE_UNKNOWN_PREFIX);
ERR_TABLE_NOT_EXISTS EXCEPTION;
CODE_TABLE_NOT_EXISTS CONSTANT PLS_INTEGER := -20014;
MSG_TABLE_NOT_EXISTS VARCHAR2(4000) := 'Table does not exist';
PRAGMA EXCEPTION_INIT( ERR_TABLE_NOT_EXISTS
,CODE_TABLE_NOT_EXISTS);
ERR_COLUMN_NOT_EXISTS EXCEPTION;
CODE_COLUMN_NOT_EXISTS CONSTANT PLS_INTEGER := -20015;
MSG_COLUMN_NOT_EXISTS VARCHAR2(4000) := 'Column does not exist in table';
PRAGMA EXCEPTION_INIT( ERR_COLUMN_NOT_EXISTS
,CODE_COLUMN_NOT_EXISTS);
ERR_UNSUPPORTED_DATA_TYPE EXCEPTION;
CODE_UNSUPPORTED_DATA_TYPE CONSTANT PLS_INTEGER := -20016;
MSG_UNSUPPORTED_DATA_TYPE VARCHAR2(4000) := 'Unsupported data type';
PRAGMA EXCEPTION_INIT( ERR_UNSUPPORTED_DATA_TYPE
,CODE_UNSUPPORTED_DATA_TYPE);
ERR_MISSING_SOURCE_KEY EXCEPTION;
CODE_MISSING_SOURCE_KEY CONSTANT PLS_INTEGER := -20017;
MSG_MISSING_SOURCE_KEY VARCHAR2(4000) := 'The Source was not found in parent table A_SOURCE';
PRAGMA EXCEPTION_INIT( ERR_MISSING_SOURCE_KEY
,CODE_MISSING_SOURCE_KEY);
ERR_NULL_SOURCE_FILE_CONFIG_KEY EXCEPTION;
CODE_NULL_SOURCE_FILE_CONFIG_KEY CONSTANT PLS_INTEGER := -20018;
MSG_NULL_SOURCE_FILE_CONFIG_KEY VARCHAR2(4000) := 'No entry in A_SOURCE_FILE_CONFIG table for specified A_SOURCE_FILE_CONFIG_KEY';
PRAGMA EXCEPTION_INIT( ERR_NULL_SOURCE_FILE_CONFIG_KEY
,CODE_NULL_SOURCE_FILE_CONFIG_KEY);
ERR_DUPLICATED_SOURCE_KEY EXCEPTION;
CODE_DUPLICATED_SOURCE_KEY CONSTANT PLS_INTEGER := -20019;
MSG_DUPLICATED_SOURCE_KEY VARCHAR2(4000) := 'The Source already exists in the A_SOURCE table';
PRAGMA EXCEPTION_INIT( ERR_DUPLICATED_SOURCE_KEY
,CODE_DUPLICATED_SOURCE_KEY);
ERR_MISSING_CONTAINER_CONFIG EXCEPTION;
CODE_MISSING_CONTAINER_CONFIG CONSTANT PLS_INTEGER := -20020;
MSG_MISSING_CONTAINER_CONFIG VARCHAR2(4000) := 'No match in A_SOURCE_FILE_CONFIG table where SOURCE_FILE_TYPE=''CONTAINER'' and specified SOURCE_FILE_ID';
PRAGMA EXCEPTION_INIT( ERR_MISSING_CONTAINER_CONFIG
,CODE_MISSING_CONTAINER_CONFIG);
---------------------------------------------------------------------------------------------------------------------------
-- Error catalogue: each error is declared as a triple (named EXCEPTION, numeric CODE in the -20000.. user range,
-- default MSG text) bound together with PRAGMA EXCEPTION_INIT, so callers can both RAISE the named exception
-- and resolve its message via GET_ERROR_MESSAGE / the Errors array loaded by INIT_ERRORS.
---------------------------------------------------------------------------------------------------------------------------
ERR_MULTIPLE_CONTAINER_ENTRIES EXCEPTION;
CODE_MULTIPLE_CONTAINER_ENTRIES CONSTANT PLS_INTEGER := -20021;
MSG_MULTIPLE_CONTAINER_ENTRIES VARCHAR2(4000) := 'Multiple matches in A_SOURCE_FILE_CONFIG table where SOURCE_FILE_TYPE=''CONTAINER'' and specified SOURCE_FILE_ID';
PRAGMA EXCEPTION_INIT( ERR_MULTIPLE_CONTAINER_ENTRIES
,CODE_MULTIPLE_CONTAINER_ENTRIES);
ERR_WRONG_DESTINATION_PARAM EXCEPTION;
CODE_WRONG_DESTINATION_PARAM CONSTANT PLS_INTEGER := -20022;
MSG_WRONG_DESTINATION_PARAM VARCHAR2(4000) := 'Wrong destination parameter provided.';
PRAGMA EXCEPTION_INIT( ERR_WRONG_DESTINATION_PARAM
,CODE_WRONG_DESTINATION_PARAM);
ERR_FILE_NOT_EXISTS_ON_CLOUD EXCEPTION;
CODE_FILE_NOT_EXISTS_ON_CLOUD CONSTANT PLS_INTEGER := -20023;
MSG_FILE_NOT_EXISTS_ON_CLOUD VARCHAR2(4000) := 'File not exists on cloud.';
PRAGMA EXCEPTION_INIT( ERR_FILE_NOT_EXISTS_ON_CLOUD
,CODE_FILE_NOT_EXISTS_ON_CLOUD);
ERR_FILE_ALREADY_REGISTERED EXCEPTION;
CODE_FILE_ALREADY_REGISTERED CONSTANT PLS_INTEGER := -20024;
MSG_FILE_ALREADY_REGISTERED VARCHAR2(4000) := 'File already registered in A_SOURCE_FILE_RECEIVED table.';
PRAGMA EXCEPTION_INIT( ERR_FILE_ALREADY_REGISTERED
,CODE_FILE_ALREADY_REGISTERED);
ERR_WRONG_DATE_TIMESTAMP_FORMAT EXCEPTION;
CODE_WRONG_DATE_TIMESTAMP_FORMAT CONSTANT PLS_INTEGER := -20025;
MSG_WRONG_DATE_TIMESTAMP_FORMAT VARCHAR2(4000) := 'Provided DATE or TIMESTAMP format has errors (possible duplicated codes, ex: ''DD'').';
PRAGMA EXCEPTION_INIT( ERR_WRONG_DATE_TIMESTAMP_FORMAT
,CODE_WRONG_DATE_TIMESTAMP_FORMAT);
-- Multi-line message: cgBL is presumably a line-break constant declared earlier in this spec — confirm.
ERR_ENVIRONMENT_NOT_SET EXCEPTION;
CODE_ENVIRONMENT_NOT_SET CONSTANT PLS_INTEGER := -20026;
MSG_ENVIRONMENT_NOT_SET VARCHAR2(4000) := 'EnvironmentID not set'
||cgBL||' Information about environment is needed to get proper configuration values.'
||cgBL||' It can be set up in two different ways:'
||cgBL||' 1. Set it on session level: execute DBMS_SESSION.SET_IDENTIFIER (client_id => ''dev'')'
||cgBL||' 2. Set it on configuration level: Insert into CT_MRDS.A_FILE_MANAGER_CONFIG (ENVIRONMENT_ID,CONFIG_VARIABLE,CONFIG_VARIABLE_VALUE) values (''default'',''environment_id'',''dev'')'
||cgBL||' Session level setup (1.) takes precedence over configuration level one (2.)'
;
PRAGMA EXCEPTION_INIT( ERR_ENVIRONMENT_NOT_SET
,CODE_ENVIRONMENT_NOT_SET);
ERR_CONFIG_VARIABLE_NOT_SET EXCEPTION;
CODE_CONFIG_VARIABLE_NOT_SET CONSTANT PLS_INTEGER := -20027;
MSG_CONFIG_VARIABLE_NOT_SET VARCHAR2(4000) := 'Missing configuration value in A_FILE_MANAGER_CONFIG';
PRAGMA EXCEPTION_INIT( ERR_CONFIG_VARIABLE_NOT_SET
,CODE_CONFIG_VARIABLE_NOT_SET);
ERR_NOT_INPUT_SOURCE_FILE_TYPE EXCEPTION;
CODE_NOT_INPUT_SOURCE_FILE_TYPE CONSTANT PLS_INTEGER := -20028;
MSG_NOT_INPUT_SOURCE_FILE_TYPE VARCHAR2(4000) := 'Archival can be executed only for A_SOURCE_FILE_CONFIG_KEY where SOURCE_FILE_TYPE=''INPUT''';
PRAGMA EXCEPTION_INIT( ERR_NOT_INPUT_SOURCE_FILE_TYPE
,CODE_NOT_INPUT_SOURCE_FILE_TYPE);
ERR_EXP_DATA_FOR_ARCH_FAILED EXCEPTION;
CODE_EXP_DATA_FOR_ARCH_FAILED CONSTANT PLS_INTEGER := -20029;
MSG_EXP_DATA_FOR_ARCH_FAILED VARCHAR2(4000) := 'Export data for archival failed.';
PRAGMA EXCEPTION_INIT( ERR_EXP_DATA_FOR_ARCH_FAILED
,CODE_EXP_DATA_FOR_ARCH_FAILED);
-- Error -20030: raised when restoring exported files from TRASH fails during archival recovery.
ERR_RESTORE_FILE_FROM_TRASH EXCEPTION;
CODE_RESTORE_FILE_FROM_TRASH CONSTANT PLS_INTEGER := -20030;
-- BUGFIX: corrected typo ("occured") and grammar ("while archival process") in the user-facing message.
MSG_RESTORE_FILE_FROM_TRASH VARCHAR2(4000) := 'Unexpected issues occurred during the archival process. Restoration of exported files failed.';
PRAGMA EXCEPTION_INIT( ERR_RESTORE_FILE_FROM_TRASH
,CODE_RESTORE_FILE_FROM_TRASH);
-- Error -20031: status transition to ARCHIVED in A_SOURCE_FILE_RECEIVED did not succeed.
ERR_CHANGE_STAT_TO_ARCHIVED_FAILED EXCEPTION;
CODE_CHANGE_STAT_TO_ARCHIVED_FAILED CONSTANT PLS_INTEGER := -20031;
MSG_CHANGE_STAT_TO_ARCHIVED_FAILED VARCHAR2(4000) := 'Failed to change file status to: ARCHIVED in A_SOURCE_FILE_RECEIVED table.';
PRAGMA EXCEPTION_INIT( ERR_CHANGE_STAT_TO_ARCHIVED_FAILED
,CODE_CHANGE_STAT_TO_ARCHIVED_FAILED);
-- Error -20032: the safety move of a file into TRASH (done prior to dropping it) failed.
ERR_MOVE_FILE_TO_TRASH_FAILED EXCEPTION;
CODE_MOVE_FILE_TO_TRASH_FAILED CONSTANT PLS_INTEGER := -20032;
MSG_MOVE_FILE_TO_TRASH_FAILED VARCHAR2(4000) := 'FAILED to move file to TRASH before DROPPING it.';
PRAGMA EXCEPTION_INIT( ERR_MOVE_FILE_TO_TRASH_FAILED
,CODE_MOVE_FILE_TO_TRASH_FAILED);
-- Error -20033: dropping exported files failed.
ERR_DROP_EXPORTED_FILES_FAILED EXCEPTION;
CODE_DROP_EXPORTED_FILES_FAILED CONSTANT PLS_INTEGER := -20033;
-- BUGFIX: message was copy-pasted from MSG_MOVE_FILE_TO_TRASH_FAILED (-20032) and described the wrong
-- failure; it must report the DROP of exported files, not the move-to-TRASH step.
MSG_DROP_EXPORTED_FILES_FAILED VARCHAR2(4000) := 'FAILED to DROP exported files.';
PRAGMA EXCEPTION_INIT( ERR_DROP_EXPORTED_FILES_FAILED
,CODE_DROP_EXPORTED_FILES_FAILED);
-- Error -20034: bucket-area argument outside the accepted set (INBOX, ODS, DATA, ARCHIVE).
ERR_INVALID_BUCKET_AREA EXCEPTION;
CODE_INVALID_BUCKET_AREA CONSTANT PLS_INTEGER := -20034;
MSG_INVALID_BUCKET_AREA VARCHAR2(4000) := 'Invalid bucket area specified. Valid values: INBOX, ODS, DATA, ARCHIVE';
PRAGMA EXCEPTION_INIT( ERR_INVALID_BUCKET_AREA
,CODE_INVALID_BUCKET_AREA);
-- NOTE(review): codes jump from -20034 to -20110 here; presumably the -201xx range is reserved
-- for parallel-execution errors — confirm against the package's error-numbering convention.
ERR_INVALID_PARALLEL_DEGREE EXCEPTION;
CODE_INVALID_PARALLEL_DEGREE CONSTANT PLS_INTEGER := -20110;
MSG_INVALID_PARALLEL_DEGREE VARCHAR2(4000) := 'Invalid parallel degree parameter. Must be between 1 and 16';
PRAGMA EXCEPTION_INIT( ERR_INVALID_PARALLEL_DEGREE
,CODE_INVALID_PARALLEL_DEGREE);
ERR_PARALLEL_EXECUTION_FAILED EXCEPTION;
CODE_PARALLEL_EXECUTION_FAILED CONSTANT PLS_INTEGER := -20111;
MSG_PARALLEL_EXECUTION_FAILED VARCHAR2(4000) := 'Parallel execution failed';
PRAGMA EXCEPTION_INIT( ERR_PARALLEL_EXECUTION_FAILED
,CODE_PARALLEL_EXECUTION_FAILED);
-- Error -20999: catch-all for errors not covered by a dedicated code above.
ERR_UNKNOWN EXCEPTION;
CODE_UNKNOWN CONSTANT PLS_INTEGER := -20999;
-- BUGFIX: corrected typo "Occured" -> "Occurred" in the user-facing message.
MSG_UNKNOWN VARCHAR2(4000) := 'Unknown Error Occurred';
PRAGMA EXCEPTION_INIT( ERR_UNKNOWN
,CODE_UNKNOWN);
---------------------------------------------------------------------------------------------------------------------------
---------------------------------------------------------------------------------------------------------------------------
/**
* @name LOG_PROCESS_EVENT
* @desc Insert a new log record into A_PROCESS_LOG table.
* Also outputs to console if gvConsoleLoggingEnabled = 'ON'.
* Respects logging level configuration (gvMinLogLevel).
* @param pLogMessage - Text of the message to log
* @param pLogLevel - Severity label for the record (default 'ERROR')
* @param pParameters - Optional caller parameter string recorded for context
* @param pProcessName - Name of the calling process/package (default 'FILE_MANAGER')
* @example ENV_MANAGER.LOG_PROCESS_EVENT('Process completed successfully', 'INFO', 'pParam1=value1');
* @ex_rslt Record inserted into A_PROCESS_LOG table and optionally displayed in console output
**/
PROCEDURE LOG_PROCESS_EVENT (
pLogMessage VARCHAR2
,pLogLevel VARCHAR2 DEFAULT 'ERROR'
,pParameters VARCHAR2 DEFAULT NULL
,pProcessName VARCHAR2 DEFAULT 'FILE_MANAGER'
);
/**
* @name LOG_PROCESS_ERROR
* @desc Insert a detailed error record into A_PROCESS_LOG table with full stack trace, backtrace, and call stack.
* This procedure captures comprehensive error information for debugging purposes while
* allowing clean user-facing error messages to be raised separately.
* @param pLogMessage - Base error message description
* @param pParameters - Procedure parameters for context
* @param pProcessName - Name of the calling process/package
* @ex_rslt Record inserted into A_PROCESS_LOG table with complete error stack information
*/
PROCEDURE LOG_PROCESS_ERROR (
pLogMessage VARCHAR2
,pParameters VARCHAR2 DEFAULT NULL
,pProcessName VARCHAR2 DEFAULT 'FILE_MANAGER'
);
/**
* @name INIT_ERRORS
* @desc Loads data into Errors array.
* Errors array is a list of Record(Error_Code, Error_Message) index by Error_Code.
* Called automatically during package initialization.
* @example Called automatically when package is first referenced
* @ex_rslt Errors array populated with all error codes and messages
**/
PROCEDURE INIT_ERRORS;
/**
* @name GET_DEFAULT_ENV
* @desc It returns string with name of default environment.
* Return string is A_FILE_MANAGER_CONFIG.ENVIRONMENT_ID value.
* @example select ENV_MANAGER.GET_DEFAULT_ENV() from dual;
* @ex_rslt dev
**/
FUNCTION GET_DEFAULT_ENV
RETURN VARCHAR2;
/**
* @name INIT_VARIABLES
* @desc For specified pEnv parameter (A_FILE_MANAGER_CONFIG.ENVIRONMENT_ID)
* Assign values to following global package variables:
* - gvNameSpace
* - gvRegion
* - gvCredentialName
* - gvInboxBucketName
* - gvDataBucketName
* - gvArchiveBucketName
* - gvInboxBucketUri
* - gvDataBucketUri
* - gvArchiveBucketUri
* - gvLoggingEnabled
* - gvMinLogLevel
* - gvDefaultDateFormat
* - gvConsoleLoggingEnabled
* @param pEnv - Environment identifier (A_FILE_MANAGER_CONFIG.ENVIRONMENT_ID) whose configuration is loaded
**/
PROCEDURE INIT_VARIABLES(
pEnv VARCHAR2
);
/**
* @name GET_ERROR_MESSAGE
* @desc It returns string with error message for specified pCode (Error_Code).
* Error message is taken from Errors Array loaded by INIT_ERRORS procedure
* @param pCode - Numeric error code (e.g. -20009) to resolve
* @example select ENV_MANAGER.GET_ERROR_MESSAGE(pCode => -20009) from dual;
* @ex_rslt File not found on the cloud
**/
FUNCTION GET_ERROR_MESSAGE(
pCode PLS_INTEGER
) RETURN VARCHAR2;
/**
* @name GET_ERROR_STACK
* @desc It returns string with all possible error stack info.
* Error message is taken from Errors Array loaded by INIT_ERRORS procedure
* @param pFormat - Output format selector (e.g. 'OUTPUT')
* @param pCode - Numeric error code to include in the report
* @param pSourceFileReceivedKey - Optional key of the related A_SOURCE_FILE_RECEIVED row
* @example
* select ENV_MANAGER.GET_ERROR_STACK(
* pFormat => 'OUTPUT'
* ,pCode => -20009
* ,pSourceFileReceivedKey => NULL)
* from dual
* @ex_rslt
* ------------------------------------------------------+
* Error Message:
* ORA-0000: normal, successful completion
* -------------------------------------------------------
* Error Stack:
* -------------------------------------------------------
* Error Backtrace:
* ------------------------------------------------------+
**/
FUNCTION GET_ERROR_STACK(
pFormat VARCHAR2
,pCode PLS_INTEGER
,pSourceFileReceivedKey CT_MRDS.A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY%TYPE DEFAULT NULL
) RETURN VARCHAR2;
/**
* @name FORMAT_PARAMETERS
* @desc Formats parameter list for logging purposes.
* Converts SYS.ODCIVARCHAR2LIST to formatted string with proper NULL handling.
* @param pParameterList - Collection of 'name=value' strings to format
* @example select ENV_MANAGER.FORMAT_PARAMETERS(SYS.ODCIVARCHAR2LIST('param1=value1', 'param2=NULL')) from dual;
* @ex_rslt param1=value1 ,
* param2=NULL
**/
FUNCTION FORMAT_PARAMETERS(
pParameterList SYS.ODCIVARCHAR2LIST
) RETURN VARCHAR2;
/**
* @name ANALYZE_VALIDATION_ERRORS
* @desc Analyzes CSV validation errors and generates detailed diagnostic report.
* Compares CSV structure with template table and provides specific error analysis.
* Includes suggested solutions for common validation issues.
* @param pValidationLogTable - Name of validation log table (e.g., VALIDATE$242_LOG)
* @param pTemplateSchema - Schema of template table (e.g., CT_ET_TEMPLATES)
* @param pTemplateTable - Name of template table (e.g., MOCK_PROC_TABLE)
* @param pCsvFileUri - URI of CSV file being validated
* @example SELECT ENV_MANAGER.ANALYZE_VALIDATION_ERRORS('VALIDATE$242_LOG', 'CT_ET_TEMPLATES', 'MOCK_PROC_TABLE', 'https://...') FROM DUAL;
* @ex_rslt Detailed validation analysis report with column mismatches and solutions
**/
FUNCTION ANALYZE_VALIDATION_ERRORS(
pValidationLogTable VARCHAR2,
pTemplateSchema VARCHAR2,
pTemplateTable VARCHAR2,
pCsvFileUri VARCHAR2
) RETURN VARCHAR2;
---------------------------------------------------------------------------------------------------------------------------
-- PACKAGE VERSION MANAGEMENT FUNCTIONS
---------------------------------------------------------------------------------------------------------------------------
/**
* @name GET_VERSION
* @desc Returns the current version number of the ENV_MANAGER package.
* Uses semantic versioning format (MAJOR.MINOR.PATCH).
* @example SELECT ENV_MANAGER.GET_VERSION() FROM DUAL;
* @ex_rslt 3.0.0
**/
FUNCTION GET_VERSION RETURN VARCHAR2;
/**
* @name GET_BUILD_INFO
* @desc Returns comprehensive build information including version, build date, and author.
* Formatted for display in logs or monitoring systems.
* @example SELECT ENV_MANAGER.GET_BUILD_INFO() FROM DUAL;
* @ex_rslt Package: ENV_MANAGER
* Version: 3.0.0
* Build Date: 2025-10-22 16:00:00
* Author: Grzegorz Michalski
**/
FUNCTION GET_BUILD_INFO RETURN VARCHAR2;
/**
* @name GET_VERSION_HISTORY
* @desc Returns complete version history with all releases and changes.
* Shows evolution of package features over time.
* @example SELECT ENV_MANAGER.GET_VERSION_HISTORY() FROM DUAL;
* @ex_rslt ENV_MANAGER Version History:
* 3.0.0 (2025-10-22): Added package versioning system...
* 2.1.0 (2025-10-15): Added ANALYZE_VALIDATION_ERRORS function...
**/
FUNCTION GET_VERSION_HISTORY RETURN VARCHAR2;
/**
* @name GET_PACKAGE_VERSION_INFO
* @desc Universal function to get formatted version information for any package.
* This centralized function is used by all packages in the system.
* @param pPackageName - Name of the package
* @param pVersion - Version string (MAJOR.MINOR.PATCH format)
* @param pBuildDate - Build date timestamp
* @param pAuthor - Package author name
* @example SELECT ENV_MANAGER.GET_PACKAGE_VERSION_INFO('FILE_MANAGER', '2.1.0', '2025-10-22 15:00:00', 'Grzegorz Michalski') FROM DUAL;
* @ex_rslt Package: FILE_MANAGER
* Version: 2.1.0
* Build Date: 2025-10-22 15:00:00
* Author: Grzegorz Michalski
**/
FUNCTION GET_PACKAGE_VERSION_INFO(
pPackageName VARCHAR2,
pVersion VARCHAR2,
pBuildDate VARCHAR2,
pAuthor VARCHAR2
) RETURN VARCHAR2;
/**
* @name FORMAT_VERSION_HISTORY
* @desc Universal function to format version history for any package.
* Adds package name header and proper formatting.
* @param pPackageName - Name of the package
* @param pVersionHistory - Complete version history text
* @example SELECT ENV_MANAGER.FORMAT_VERSION_HISTORY('FILE_MANAGER', '2.1.0 (2025-10-22): Export procedures...') FROM DUAL;
* @ex_rslt FILE_MANAGER Version History:
* 2.1.0 (2025-10-22): Export procedures...
**/
FUNCTION FORMAT_VERSION_HISTORY(
pPackageName VARCHAR2,
pVersionHistory VARCHAR2
) RETURN VARCHAR2;
---------------------------------------------------------------------------------------------------------------------------
-- PACKAGE HASH + CHANGE DETECTION FUNCTIONS
---------------------------------------------------------------------------------------------------------------------------
/**
* @name CALCULATE_PACKAGE_HASH
* @desc Calculates SHA256 hash of package source code from ALL_SOURCE.
* Returns hash for both SPEC and BODY (if exists).
* Used for automatic change detection.
* @param pPackageOwner - Schema owner of the package
* @param pPackageName - Name of the package
* @param pPackageType - Type of package code ('PACKAGE' for SPEC, 'PACKAGE BODY' for BODY)
* @example SELECT ENV_MANAGER.CALCULATE_PACKAGE_HASH('CT_MRDS', 'FILE_MANAGER', 'PACKAGE') FROM DUAL;
* @ex_rslt A7B3C5D9E8F1234567890ABCDEF... (64-character SHA256 hash)
**/
FUNCTION CALCULATE_PACKAGE_HASH(
pPackageOwner VARCHAR2,
pPackageName VARCHAR2,
pPackageType VARCHAR2 -- 'PACKAGE' or 'PACKAGE BODY'
) RETURN VARCHAR2;
/**
* @name TRACK_PACKAGE_VERSION
* @desc Records package version and source code hash in A_PACKAGE_VERSION_TRACKING table.
* Automatically detects if source code changed without version update.
* Should be called after every package deployment.
* @param pPackageOwner - Schema owner of the package
* @param pPackageName - Name of the package
* @param pPackageVersion - Current version from PACKAGE_VERSION constant
* @param pPackageBuildDate - Build date from PACKAGE_BUILD_DATE constant
* @param pPackageAuthor - Author from PACKAGE_AUTHOR constant
* @example EXEC ENV_MANAGER.TRACK_PACKAGE_VERSION('CT_MRDS', 'FILE_MANAGER', '3.2.0', '2025-10-22 16:30:00', 'Grzegorz Michalski');
* @ex_rslt Record inserted into A_PACKAGE_VERSION_TRACKING with change detection status
**/
PROCEDURE TRACK_PACKAGE_VERSION(
pPackageOwner VARCHAR2,
pPackageName VARCHAR2,
pPackageVersion VARCHAR2,
pPackageBuildDate VARCHAR2,
pPackageAuthor VARCHAR2
);
/**
* @name CHECK_PACKAGE_CHANGES
* @desc Checks if package source code has changed since last tracking.
* Compares current hash with last recorded hash in A_PACKAGE_VERSION_TRACKING.
* Returns detailed change detection report.
* @param pPackageOwner - Schema owner of the package
* @param pPackageName - Name of the package
* @example SELECT ENV_MANAGER.CHECK_PACKAGE_CHANGES('CT_MRDS', 'FILE_MANAGER') FROM DUAL;
* @ex_rslt WARNING: Package changed without version update!
* Last Version: 3.2.0
* Current Hash (SPEC): A7B3C5D9...
* Last Hash (SPEC): B8C4D6E0...
* RECOMMENDATION: Update PACKAGE_VERSION and PACKAGE_BUILD_DATE
**/
FUNCTION CHECK_PACKAGE_CHANGES(
pPackageOwner VARCHAR2,
pPackageName VARCHAR2
) RETURN VARCHAR2;
/**
* @name GET_PACKAGE_HASH_INFO
* @desc Returns formatted information about package hash and tracking history.
* Includes current hash, last tracked hash, and change detection status.
* @param pPackageOwner - Schema owner of the package
* @param pPackageName - Name of the package
* @example SELECT ENV_MANAGER.GET_PACKAGE_HASH_INFO('CT_MRDS', 'FILE_MANAGER') FROM DUAL;
* @ex_rslt Package: CT_MRDS.FILE_MANAGER
* Current Version: 3.2.0
* Current Hash (SPEC): A7B3C5D9...
* Last Tracked: 2025-10-22 16:30:00
* Status: OK - No changes detected
**/
FUNCTION GET_PACKAGE_HASH_INFO(
pPackageOwner VARCHAR2,
pPackageName VARCHAR2
) RETURN VARCHAR2;
END ENV_MANAGER;
/

View File

@@ -108,7 +108,7 @@ BEGIN
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_DEBT',
pMaxFileSize => 104857600, -- 100MB in bytes (safe for parallel execution, avoids ORA-04036)
pRegisterExport => TRUE, -- Register exported files in A_SOURCE_FILE_RECEIVED with metadata (CHECKSUM, CREATED, BYTES)
pProcessName => 'MARS-835' -- Process identifier for tracking
pProcessName => 'MARS-835' -- Process identifier for tracking
);
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_DEBT exported to DATA bucket with template column order');
@@ -129,9 +129,7 @@ BEGIN
pFolderName => 'ARCHIVE/CSDB/CSDB_DEBT',
pMaxDate => &cutoff_date,
pParallelDegree => 16,
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_DEBT',
pRegisterExport => TRUE,
pProcessName => 'MARS-835'
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_DEBT'
);
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_DEBT exported to HIST bucket with template column order');
@@ -231,7 +229,7 @@ BEGIN
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_DEBT_DAILY',
pMaxFileSize => 104857600, -- 100MB in bytes (safe for parallel execution, avoids ORA-04036)
pRegisterExport => TRUE, -- Register exported files in A_SOURCE_FILE_RECEIVED with metadata (CHECKSUM, CREATED, BYTES)
pProcessName => 'MARS-835' -- Process identifier for tracking
pProcessName => 'MARS-835' -- Process identifier for tracking
);
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_DEBT_DAILY exported to DATA bucket with template column order');
@@ -252,9 +250,7 @@ BEGIN
pFolderName => 'ARCHIVE/CSDB/CSDB_DEBT_DAILY',
pMaxDate => &cutoff_date,
pParallelDegree => 16,
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_DEBT_DAILY',
pRegisterExport => TRUE,
pProcessName => 'MARS-835'
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_DEBT_DAILY'
);
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_DEBT_DAILY exported to HIST bucket with template column order');

View File

@@ -35,9 +35,7 @@ BEGIN
pFolderName => 'ARCHIVE/CSDB/CSDB_INSTR_RAT_FULL',
pMaxDate => SYSDATE,
pParallelDegree => 8,
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_INSTR_RAT_FULL',
pRegisterExport => TRUE,
pProcessName => 'MARS-835'
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_INSTR_RAT_FULL'
);
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_INSTR_RAT_FULL exported to HIST bucket with template column order');
@@ -64,9 +62,7 @@ BEGIN
pFolderName => 'ARCHIVE/CSDB/CSDB_INSTR_DESC_FULL',
pMaxDate => SYSDATE,
pParallelDegree => 8,
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_INSTR_DESC_FULL',
pRegisterExport => TRUE,
pProcessName => 'MARS-835'
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_INSTR_DESC_FULL'
);
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_INSTR_DESC_FULL exported to HIST bucket with template column order');
@@ -93,9 +89,7 @@ BEGIN
pFolderName => 'ARCHIVE/CSDB/CSDB_ISSUER_RAT_FULL',
pMaxDate => SYSDATE,
pParallelDegree => 8,
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_ISSUER_RAT_FULL',
pRegisterExport => TRUE,
pProcessName => 'MARS-835'
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_ISSUER_RAT_FULL'
);
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_ISSUER_RAT_FULL exported to HIST bucket with template column order');
@@ -122,9 +116,7 @@ BEGIN
pFolderName => 'ARCHIVE/CSDB/CSDB_ISSUER_DESC_FULL',
pMaxDate => SYSDATE,
pParallelDegree => 8,
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_ISSUER_DESC_FULL',
pRegisterExport => TRUE,
pProcessName => 'MARS-835'
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_ISSUER_DESC_FULL'
);
DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_ISSUER_DESC_FULL exported to HIST bucket with template column order');

View File

@@ -0,0 +1,54 @@
--=============================================================================================================================
-- MARS-835 ROLLBACK: Delete File Registration Records
--=============================================================================================================================
-- Purpose: Delete all file registration records from A_SOURCE_FILE_RECEIVED table for MARS-835 process
-- Author: Grzegorz Michalski
-- Date: 2026-02-13
-- Related: MARS-835 - CSDB Data Export Rollback
--=============================================================================================================================
SET SERVEROUTPUT ON SIZE UNLIMITED
SET TIMING ON
PROMPT ========================================================================
PROMPT ROLLBACK: Deleting file registration records from A_SOURCE_FILE_RECEIVED
PROMPT ========================================================================
-- Anonymous block: deletes every MARS-835 registration row and reports the count and elapsed time.
DECLARE
vRowCount NUMBER := 0;
vStartTime TIMESTAMP := SYSTIMESTAMP;
vEndTime TIMESTAMP;
vElapsedSeconds NUMBER;
BEGIN
DBMS_OUTPUT.PUT_LINE('Deleting all MARS-835 file registrations from A_SOURCE_FILE_RECEIVED...');
-- Delete all records for MARS-835 process
DELETE FROM CT_MRDS.A_SOURCE_FILE_RECEIVED
WHERE PROCESS_NAME = 'MARS-835';
-- SQL%ROWCOUNT must be read before COMMIT, which resets the implicit-cursor attributes.
vRowCount := SQL%ROWCOUNT;
COMMIT;
vEndTime := SYSTIMESTAMP;
-- NOTE(review): DAY component of the interval is not included, so runs longer than 24h
-- would under-report elapsed time — acceptable for a delete, but confirm.
vElapsedSeconds := EXTRACT(SECOND FROM (vEndTime - vStartTime)) +
EXTRACT(MINUTE FROM (vEndTime - vStartTime)) * 60 +
EXTRACT(HOUR FROM (vEndTime - vStartTime)) * 3600;
DBMS_OUTPUT.PUT_LINE('========================================================================');
DBMS_OUTPUT.PUT_LINE('SUCCESS: File registration records deleted');
DBMS_OUTPUT.PUT_LINE('========================================================================');
DBMS_OUTPUT.PUT_LINE('Records deleted: ' || vRowCount);
DBMS_OUTPUT.PUT_LINE('Elapsed time: ' || ROUND(vElapsedSeconds, 2) || ' seconds');
DBMS_OUTPUT.PUT_LINE('========================================================================');
EXCEPTION
WHEN OTHERS THEN
-- Undo the partial delete and re-raise so the calling script fails visibly.
ROLLBACK;
DBMS_OUTPUT.PUT_LINE('ERROR: Failed to delete file registration records');
DBMS_OUTPUT.PUT_LINE('Error message: ' || SQLERRM);
RAISE;
END;
--=============================================================================================================================
-- End of Script
--=============================================================================================================================

View File

@@ -29,35 +29,56 @@ BEGIN
vHistBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
vCredentialName := CT_MRDS.ENV_MANAGER.gvCredentialName;
DBMS_OUTPUT.PUT_LINE('Deleting DEBT files from DATA and HIST buckets...');
DBMS_OUTPUT.PUT_LINE(' Using A_SOURCE_FILE_RECEIVED with PROCESS_NAME = ''MARS-835''');
DBMS_OUTPUT.PUT_LINE('Deleting DEBT CSV files from DATA bucket...');
DBMS_OUTPUT.PUT_LINE(' Using DBMS_CLOUD.LIST_OBJECTS to scan bucket');
-- Delete files registered by MARS-835 process for CSDB_DEBT
-- Delete CSV files for DEBT from DATA bucket using LIST_OBJECTS
FOR rec IN (
SELECT SOURCE_FILE_NAME AS object_name,
CASE
WHEN SOURCE_FILE_NAME LIKE '%.csv' THEN 'DATA'
WHEN SOURCE_FILE_NAME LIKE '%.parquet' THEN 'ARCHIVE'
ELSE 'UNKNOWN'
END AS bucket_type
FROM CT_MRDS.A_SOURCE_FILE_RECEIVED
WHERE PROCESS_NAME = 'MARS-835'
AND SOURCE_FILE_NAME LIKE '%DEBT%'
AND SOURCE_FILE_NAME NOT LIKE '%DEBT_DAILY%'
SELECT object_name
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
credential_name => vCredentialName,
location_uri => vDataBucketUri || 'ODS/CSDB/CSDB_DEBT/'
))
WHERE object_name LIKE 'LEGACY_DEBT%'
) LOOP
BEGIN
IF rec.bucket_type = 'DATA' THEN
DBMS_CLOUD.DELETE_OBJECT(
credential_name => vCredentialName,
object_uri => vDataBucketUri || rec.object_name
);
ELSIF rec.bucket_type = 'ARCHIVE' THEN
DBMS_CLOUD.DELETE_OBJECT(
credential_name => vCredentialName,
object_uri => vHistBucketUri || rec.object_name
);
END IF;
DBMS_OUTPUT.PUT_LINE(' Deleted (' || rec.bucket_type || '): ' || rec.object_name);
DBMS_CLOUD.DELETE_OBJECT(
credential_name => vCredentialName,
object_uri => vDataBucketUri || 'ODS/CSDB/CSDB_DEBT/' || rec.object_name
);
DBMS_OUTPUT.PUT_LINE(' Deleted: ' || rec.object_name);
vFileCount := vFileCount + 1;
EXCEPTION
WHEN OTHERS THEN
IF SQLCODE = -20404 THEN
DBMS_OUTPUT.PUT_LINE(' Skipped (not found): ' || rec.object_name);
ELSE
RAISE;
END IF;
END;
END LOOP;
DBMS_OUTPUT.PUT_LINE('SUCCESS: DEBT CSV files deleted from DATA bucket (' || vFileCount || ' file(s))');
DBMS_OUTPUT.PUT_LINE('Deleting DEBT Parquet files from ARCHIVE bucket...');
DBMS_OUTPUT.PUT_LINE(' Using DBMS_CLOUD.LIST_OBJECTS (Parquet files not registered)');
vFileCount := 0;
-- Delete Parquet files from ARCHIVE bucket using DBMS_CLOUD.LIST_OBJECTS
FOR rec IN (
SELECT object_name
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
credential_name => vCredentialName,
location_uri => vHistBucketUri || 'ARCHIVE/CSDB/CSDB_DEBT/'
))
WHERE object_name NOT LIKE '%/' -- Exclude directories
) LOOP
BEGIN
DBMS_CLOUD.DELETE_OBJECT(
credential_name => vCredentialName,
object_uri => vHistBucketUri || 'ARCHIVE/CSDB/CSDB_DEBT/' || rec.object_name
);
DBMS_OUTPUT.PUT_LINE(' Deleted: ' || rec.object_name);
vFileCount := vFileCount + 1;
EXCEPTION
WHEN OTHERS THEN
@@ -70,10 +91,10 @@ BEGIN
END LOOP;
IF vFileCount = 0 THEN
DBMS_OUTPUT.PUT_LINE(' INFO: No DEBT files found to delete');
DBMS_OUTPUT.PUT_LINE(' INFO: No DEBT Parquet files found to delete');
END IF;
DBMS_OUTPUT.PUT_LINE('SUCCESS: DEBT files deleted (' || vFileCount || ' file(s))');
DBMS_OUTPUT.PUT_LINE('SUCCESS: DEBT Parquet files deleted from ARCHIVE bucket (' || vFileCount || ' file(s))');
END;
/
@@ -96,34 +117,56 @@ BEGIN
vHistBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
vCredentialName := CT_MRDS.ENV_MANAGER.gvCredentialName;
DBMS_OUTPUT.PUT_LINE('Deleting DEBT_DAILY files from DATA and HIST buckets...');
DBMS_OUTPUT.PUT_LINE(' Using A_SOURCE_FILE_RECEIVED with PROCESS_NAME = ''MARS-835''');
DBMS_OUTPUT.PUT_LINE('Deleting DEBT_DAILY CSV files from DATA bucket...');
DBMS_OUTPUT.PUT_LINE(' Using DBMS_CLOUD.LIST_OBJECTS to scan bucket');
-- Delete files registered by MARS-835 process for CSDB_DEBT_DAILY
-- Delete CSV files for DEBT_DAILY from DATA bucket using LIST_OBJECTS
FOR rec IN (
SELECT SOURCE_FILE_NAME AS object_name,
CASE
WHEN SOURCE_FILE_NAME LIKE '%.csv' THEN 'DATA'
WHEN SOURCE_FILE_NAME LIKE '%.parquet' THEN 'ARCHIVE'
ELSE 'UNKNOWN'
END AS bucket_type
FROM CT_MRDS.A_SOURCE_FILE_RECEIVED
WHERE PROCESS_NAME = 'MARS-835'
AND SOURCE_FILE_NAME LIKE '%DEBT_DAILY%'
SELECT object_name
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
credential_name => vCredentialName,
location_uri => vDataBucketUri || 'ODS/CSDB/CSDB_DEBT_DAILY/'
))
WHERE object_name LIKE 'LEGACY_DEBT_DAILY%'
) LOOP
BEGIN
IF rec.bucket_type = 'DATA' THEN
DBMS_CLOUD.DELETE_OBJECT(
credential_name => vCredentialName,
object_uri => vDataBucketUri || rec.object_name
);
ELSIF rec.bucket_type = 'ARCHIVE' THEN
DBMS_CLOUD.DELETE_OBJECT(
credential_name => vCredentialName,
object_uri => vHistBucketUri || rec.object_name
);
END IF;
DBMS_OUTPUT.PUT_LINE(' Deleted (' || rec.bucket_type || '): ' || rec.object_name);
DBMS_CLOUD.DELETE_OBJECT(
credential_name => vCredentialName,
object_uri => vDataBucketUri || 'ODS/CSDB/CSDB_DEBT_DAILY/' || rec.object_name
);
DBMS_OUTPUT.PUT_LINE(' Deleted: ' || rec.object_name);
vFileCount := vFileCount + 1;
EXCEPTION
WHEN OTHERS THEN
IF SQLCODE = -20404 THEN
DBMS_OUTPUT.PUT_LINE(' Skipped (not found): ' || rec.object_name);
ELSE
RAISE;
END IF;
END;
END LOOP;
DBMS_OUTPUT.PUT_LINE('SUCCESS: DEBT_DAILY CSV files deleted from DATA bucket (' || vFileCount || ' file(s))');
DBMS_OUTPUT.PUT_LINE('Deleting DEBT_DAILY Parquet files from ARCHIVE bucket...');
DBMS_OUTPUT.PUT_LINE(' Using DBMS_CLOUD.LIST_OBJECTS (Parquet files not registered)');
vFileCount := 0;
-- Delete Parquet files from ARCHIVE bucket using DBMS_CLOUD.LIST_OBJECTS
FOR rec IN (
SELECT object_name
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
credential_name => vCredentialName,
location_uri => vHistBucketUri || 'ARCHIVE/CSDB/CSDB_DEBT_DAILY/'
))
WHERE object_name NOT LIKE '%/' -- Exclude directories
) LOOP
BEGIN
DBMS_CLOUD.DELETE_OBJECT(
credential_name => vCredentialName,
object_uri => vHistBucketUri || 'ARCHIVE/CSDB/CSDB_DEBT_DAILY/' || rec.object_name
);
DBMS_OUTPUT.PUT_LINE(' Deleted: ' || rec.object_name);
vFileCount := vFileCount + 1;
EXCEPTION
WHEN OTHERS THEN
@@ -136,10 +179,10 @@ BEGIN
END LOOP;
IF vFileCount = 0 THEN
DBMS_OUTPUT.PUT_LINE(' INFO: No DEBT_DAILY files found to delete');
DBMS_OUTPUT.PUT_LINE(' INFO: No DEBT_DAILY Parquet files found to delete');
END IF;
DBMS_OUTPUT.PUT_LINE('SUCCESS: DEBT_DAILY files deleted (' || vFileCount || ' file(s))');
DBMS_OUTPUT.PUT_LINE('SUCCESS: DEBT_DAILY Parquet files deleted from ARCHIVE bucket (' || vFileCount || ' file(s))');
END;
/

View File

@@ -17,29 +17,29 @@ PROMPT ========================================================================
DECLARE
vBucketUri VARCHAR2(500);
vCredentialName VARCHAR2(100);
vFileCount NUMBER := 0;
BEGIN
-- Get bucket URI and credential
vBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
vCredentialName := CT_MRDS.ENV_MANAGER.gvCredentialName;
DBMS_OUTPUT.PUT_LINE('Deleting INSTR_RAT_FULL files from HIST bucket...');
DBMS_OUTPUT.PUT_LINE(' Using A_SOURCE_FILE_RECEIVED with PROCESS_NAME = ''MARS-835''');
-- Delete files registered by MARS-835 process for INSTR_RAT_FULL
FOR rec IN (
SELECT SOURCE_FILE_NAME AS object_name
FROM CT_MRDS.A_SOURCE_FILE_RECEIVED
WHERE PROCESS_NAME = 'MARS-835'
AND SOURCE_FILE_NAME LIKE '%INSTR_RAT_FULL%'
SELECT object_name
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
credential_name => vCredentialName,
location_uri => vBucketUri || 'ARCHIVE/CSDB/CSDB_INSTR_RAT_FULL/'
))
WHERE object_name LIKE '%PARTITION_YEAR=%' -- Hive-style partitioning folders
AND object_name LIKE '%.parquet'
AND REGEXP_LIKE(object_name, '[0-9]{6}(_[0-9]+_[0-9]{8}T[0-9]{6,}Z)?\.parquet$') -- YYYYMM or YYYYMM_1_timestamp
) LOOP
BEGIN
DBMS_CLOUD.DELETE_OBJECT(
credential_name => vCredentialName,
object_uri => vBucketUri || rec.object_name
object_uri => vBucketUri || 'ARCHIVE/CSDB/CSDB_INSTR_RAT_FULL/' || rec.object_name
);
DBMS_OUTPUT.PUT_LINE(' Deleted: ' || rec.object_name);
vFileCount := vFileCount + 1;
EXCEPTION
WHEN OTHERS THEN
IF SQLCODE = -20404 THEN
@@ -50,11 +50,7 @@ BEGIN
END;
END LOOP;
IF vFileCount = 0 THEN
DBMS_OUTPUT.PUT_LINE(' INFO: No INSTR_RAT_FULL files found to delete');
END IF;
DBMS_OUTPUT.PUT_LINE('SUCCESS: INSTR_RAT_FULL files deleted (' || vFileCount || ' file(s))');
DBMS_OUTPUT.PUT_LINE('SUCCESS: INSTR_RAT_FULL files deleted');
END;
/
@@ -65,29 +61,29 @@ PROMPT ========================================================================
DECLARE
vBucketUri VARCHAR2(500);
vCredentialName VARCHAR2(100);
vFileCount NUMBER := 0;
BEGIN
-- Get bucket URI and credential
vBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
vCredentialName := CT_MRDS.ENV_MANAGER.gvCredentialName;
DBMS_OUTPUT.PUT_LINE('Deleting INSTR_DESC_FULL files from HIST bucket...');
DBMS_OUTPUT.PUT_LINE(' Using A_SOURCE_FILE_RECEIVED with PROCESS_NAME = ''MARS-835''');
-- Delete files registered by MARS-835 process for INSTR_DESC_FULL
FOR rec IN (
SELECT SOURCE_FILE_NAME AS object_name
FROM CT_MRDS.A_SOURCE_FILE_RECEIVED
WHERE PROCESS_NAME = 'MARS-835'
AND SOURCE_FILE_NAME LIKE '%INSTR_DESC_FULL%'
SELECT object_name
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
credential_name => vCredentialName,
location_uri => vBucketUri || 'ARCHIVE/CSDB/CSDB_INSTR_DESC_FULL/'
))
WHERE object_name LIKE '%PARTITION_YEAR=%' -- Hive-style partitioning folders
AND object_name LIKE '%.parquet'
AND REGEXP_LIKE(object_name, '[0-9]{6}(_[0-9]+_[0-9]{8}T[0-9]{6,}Z)?\.parquet$') -- YYYYMM or YYYYMM_1_timestamp
) LOOP
BEGIN
DBMS_CLOUD.DELETE_OBJECT(
credential_name => vCredentialName,
object_uri => vBucketUri || rec.object_name
object_uri => vBucketUri || 'ARCHIVE/CSDB/CSDB_INSTR_DESC_FULL/' || rec.object_name
);
DBMS_OUTPUT.PUT_LINE(' Deleted: ' || rec.object_name);
vFileCount := vFileCount + 1;
EXCEPTION
WHEN OTHERS THEN
IF SQLCODE = -20404 THEN
@@ -98,11 +94,7 @@ BEGIN
END;
END LOOP;
IF vFileCount = 0 THEN
DBMS_OUTPUT.PUT_LINE(' INFO: No INSTR_DESC_FULL files found to delete');
END IF;
DBMS_OUTPUT.PUT_LINE('SUCCESS: INSTR_DESC_FULL files deleted (' || vFileCount || ' file(s))');
DBMS_OUTPUT.PUT_LINE('SUCCESS: INSTR_DESC_FULL files deleted');
END;
/
@@ -113,29 +105,29 @@ PROMPT ========================================================================
DECLARE
vBucketUri VARCHAR2(500);
vCredentialName VARCHAR2(100);
vFileCount NUMBER := 0;
BEGIN
-- Get bucket URI and credential
vBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
vCredentialName := CT_MRDS.ENV_MANAGER.gvCredentialName;
DBMS_OUTPUT.PUT_LINE('Deleting ISSUER_RAT_FULL files from HIST bucket...');
DBMS_OUTPUT.PUT_LINE(' Using A_SOURCE_FILE_RECEIVED with PROCESS_NAME = ''MARS-835''');
-- Delete files registered by MARS-835 process for ISSUER_RAT_FULL
FOR rec IN (
SELECT SOURCE_FILE_NAME AS object_name
FROM CT_MRDS.A_SOURCE_FILE_RECEIVED
WHERE PROCESS_NAME = 'MARS-835'
AND SOURCE_FILE_NAME LIKE '%ISSUER_RAT_FULL%'
SELECT object_name
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
credential_name => vCredentialName,
location_uri => vBucketUri || 'ARCHIVE/CSDB/CSDB_ISSUER_RAT_FULL/'
))
WHERE object_name LIKE '%PARTITION_YEAR=%' -- Hive-style partitioning folders
AND object_name LIKE '%.parquet'
AND REGEXP_LIKE(object_name, '[0-9]{6}(_[0-9]+_[0-9]{8}T[0-9]{6,}Z)?\.parquet$') -- YYYYMM or YYYYMM_1_timestamp
) LOOP
BEGIN
DBMS_CLOUD.DELETE_OBJECT(
credential_name => vCredentialName,
object_uri => vBucketUri || rec.object_name
object_uri => vBucketUri || 'ARCHIVE/CSDB/CSDB_ISSUER_RAT_FULL/' || rec.object_name
);
DBMS_OUTPUT.PUT_LINE(' Deleted: ' || rec.object_name);
vFileCount := vFileCount + 1;
EXCEPTION
WHEN OTHERS THEN
IF SQLCODE = -20404 THEN
@@ -146,11 +138,7 @@ BEGIN
END;
END LOOP;
IF vFileCount = 0 THEN
DBMS_OUTPUT.PUT_LINE(' INFO: No ISSUER_RAT_FULL files found to delete');
END IF;
DBMS_OUTPUT.PUT_LINE('SUCCESS: ISSUER_RAT_FULL files deleted (' || vFileCount || ' file(s))');
DBMS_OUTPUT.PUT_LINE('SUCCESS: ISSUER_RAT_FULL files deleted');
END;
/
@@ -161,29 +149,29 @@ PROMPT ========================================================================
DECLARE
vBucketUri VARCHAR2(500);
vCredentialName VARCHAR2(100);
vFileCount NUMBER := 0;
BEGIN
-- Get bucket URI and credential
vBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
vCredentialName := CT_MRDS.ENV_MANAGER.gvCredentialName;
DBMS_OUTPUT.PUT_LINE('Deleting ISSUER_DESC_FULL files from HIST bucket...');
DBMS_OUTPUT.PUT_LINE(' Using A_SOURCE_FILE_RECEIVED with PROCESS_NAME = ''MARS-835''');
-- Delete files registered by MARS-835 process for ISSUER_DESC_FULL
FOR rec IN (
SELECT SOURCE_FILE_NAME AS object_name
FROM CT_MRDS.A_SOURCE_FILE_RECEIVED
WHERE PROCESS_NAME = 'MARS-835'
AND SOURCE_FILE_NAME LIKE '%ISSUER_DESC_FULL%'
SELECT object_name
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
credential_name => vCredentialName,
location_uri => vBucketUri || 'ARCHIVE/CSDB/CSDB_ISSUER_DESC_FULL/'
))
WHERE object_name LIKE '%PARTITION_YEAR=%' -- Hive-style partitioning folders
AND object_name LIKE '%.parquet'
AND REGEXP_LIKE(object_name, '[0-9]{6}(_[0-9]+_[0-9]{8}T[0-9]{6,}Z)?\.parquet$') -- YYYYMM or YYYYMM_1_timestamp
) LOOP
BEGIN
DBMS_CLOUD.DELETE_OBJECT(
credential_name => vCredentialName,
object_uri => vBucketUri || rec.object_name
object_uri => vBucketUri || 'ARCHIVE/CSDB/CSDB_ISSUER_DESC_FULL/' || rec.object_name
);
DBMS_OUTPUT.PUT_LINE(' Deleted: ' || rec.object_name);
vFileCount := vFileCount + 1;
EXCEPTION
WHEN OTHERS THEN
IF SQLCODE = -20404 THEN
@@ -194,11 +182,7 @@ BEGIN
END;
END LOOP;
IF vFileCount = 0 THEN
DBMS_OUTPUT.PUT_LINE(' INFO: No ISSUER_DESC_FULL files found to delete');
END IF;
DBMS_OUTPUT.PUT_LINE('SUCCESS: ISSUER_DESC_FULL files deleted (' || vFileCount || ' file(s))');
DBMS_OUTPUT.PUT_LINE('SUCCESS: ISSUER_DESC_FULL files deleted');
END;
/

View File

@@ -0,0 +1,80 @@
SET SERVEROUTPUT ON SIZE UNLIMITED
SET DEFINE OFF
DECLARE
    -- Ad-hoc verification run: list leftover MARS-835 export files and probe
    -- one DELETE_OBJECT call. Read-only apart from the single delete test.
    vCredential    VARCHAR2(100) := 'OCI$RESOURCE_PRINCIPAL';
    vDataBucket    VARCHAR2(200) := 'https://objectstorage.eu-frankfurt-1.oraclecloud.com/n/frtgjxu7zl7c/b/data/o/';
    vArchiveBucket VARCHAR2(200) := 'https://objectstorage.eu-frankfurt-1.oraclecloud.com/n/frtgjxu7zl7c/b/history/o/';
    vTotal         NUMBER := 0;

    -- CSV exports still present in the DATA bucket.
    CURSOR cDebtCsv IS
        SELECT object_name
        FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
                 credential_name => vCredential,
                 location_uri    => vDataBucket || 'ODS/CSDB/CSDB_DEBT/'))
        WHERE object_name LIKE 'LEGACY_DEBT%';

    CURSOR cDebtDailyCsv IS
        SELECT object_name
        FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
                 credential_name => vCredential,
                 location_uri    => vDataBucket || 'ODS/CSDB/CSDB_DEBT_DAILY/'))
        WHERE object_name LIKE 'LEGACY_DEBT_DAILY%';

    -- Parquet files in the ARCHIVE (history) bucket; DEBT listing capped at 5 rows.
    CURSOR cDebtParquet IS
        SELECT object_name
        FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
                 credential_name => vCredential,
                 location_uri    => vArchiveBucket || 'ARCHIVE/CSDB/CSDB_DEBT/'))
        WHERE ROWNUM <= 5;

    CURSOR cDebtDailyParquet IS
        SELECT object_name
        FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
                 credential_name => vCredential,
                 location_uri    => vArchiveBucket || 'ARCHIVE/CSDB/CSDB_DEBT_DAILY/'));
BEGIN
    DBMS_OUTPUT.PUT_LINE('=== Checking CSV files in DATA bucket ===');
    FOR obj IN cDebtCsv LOOP
        vTotal := vTotal + 1;
        DBMS_OUTPUT.PUT_LINE(' [' || vTotal || '] ' || obj.object_name);
    END LOOP;
    DBMS_OUTPUT.PUT_LINE('Total CSV files DEBT: ' || vTotal);

    vTotal := 0;
    FOR obj IN cDebtDailyCsv LOOP
        vTotal := vTotal + 1;
        DBMS_OUTPUT.PUT_LINE(' [' || vTotal || '] ' || obj.object_name);
    END LOOP;
    DBMS_OUTPUT.PUT_LINE('Total CSV files DEBT_DAILY: ' || vTotal);

    DBMS_OUTPUT.PUT_LINE(CHR(10) || '=== Checking Parquet files in ARCHIVE bucket ===');
    vTotal := 0;
    FOR obj IN cDebtParquet LOOP
        vTotal := vTotal + 1;
        DBMS_OUTPUT.PUT_LINE(' [' || vTotal || '] ' || obj.object_name);
    END LOOP;
    DBMS_OUTPUT.PUT_LINE('Total Parquet files DEBT (first 5): ' || vTotal);

    vTotal := 0;
    FOR obj IN cDebtDailyParquet LOOP
        vTotal := vTotal + 1;
        DBMS_OUTPUT.PUT_LINE(' [' || vTotal || '] ' || obj.object_name);
    END LOOP;
    DBMS_OUTPUT.PUT_LINE('Total Parquet files DEBT_DAILY: ' || vTotal);

    -- One-off delete probe against a known exported file name.
    DBMS_OUTPUT.PUT_LINE(CHR(10) || '=== Now testing DELETE_OBJECT ===');
    DBMS_OUTPUT.PUT_LINE('Testing delete for: ODS/CSDB/CSDB_DEBT/LEGACY_DEBT_202510_1_20260213T092239041072Z.csv');
    BEGIN
        DBMS_CLOUD.DELETE_OBJECT(
            credential_name => vCredential,
            object_uri      => vDataBucket || 'ODS/CSDB/CSDB_DEBT/LEGACY_DEBT_202510_1_20260213T092239041072Z.csv'
        );
        DBMS_OUTPUT.PUT_LINE('SUCCESS: File deleted');
    EXCEPTION
        WHEN OTHERS THEN
            DBMS_OUTPUT.PUT_LINE('ERROR: ' || SQLERRM);
    END;
END;
/

View File

@@ -0,0 +1,126 @@
--=============================================================================================================================
-- MARS-835 Manual Cleanup - Delete remaining files after rollback
--=============================================================================================================================
SET SERVEROUTPUT ON SIZE UNLIMITED
SET DEFINE OFF
DECLARE
    -- MARS-835 manual cleanup: removes leftover CSV exports (DATA bucket) and
    -- Parquet files (ARCHIVE bucket) after a rollback. Four folders are cleaned
    -- with the same list-then-delete pattern, factored into one local procedure.
    vDataBucketUri    VARCHAR2(500);
    vArchiveBucketUri VARCHAR2(500);
    vCredentialName   VARCHAR2(100);

    -- Deletes every object under pBucketUri || pFolder.
    -- pNamePattern:
    --   non-NULL -> only objects whose name matches the LIKE pattern (CSV case);
    --   NULL     -> all non-"directory" entries, i.e. names not ending in '/'
    --               (Parquet case).
    -- Each deletion attempt is reported; individual failures do not stop the
    -- loop. Prints the per-folder 'Total deleted' summary.
    PROCEDURE pDeleteFolder(
        pBucketUri   IN VARCHAR2,
        pFolder      IN VARCHAR2,
        pNamePattern IN VARCHAR2 DEFAULT NULL
    ) IS
        vDeleted NUMBER := 0;
    BEGIN
        FOR rec IN (
            SELECT object_name
            FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
                credential_name => vCredentialName,
                location_uri    => pBucketUri || pFolder
            ))
            WHERE (pNamePattern IS NOT NULL AND object_name LIKE pNamePattern)
               OR (pNamePattern IS NULL     AND object_name NOT LIKE '%/')
        ) LOOP
            BEGIN
                DBMS_CLOUD.DELETE_OBJECT(
                    credential_name => vCredentialName,
                    object_uri      => pBucketUri || pFolder || rec.object_name
                );
                DBMS_OUTPUT.PUT_LINE(' Deleted: ' || rec.object_name);
                vDeleted := vDeleted + 1;
            EXCEPTION
                WHEN OTHERS THEN
                    -- Best-effort cleanup: report and continue with the next object.
                    DBMS_OUTPUT.PUT_LINE(' ERROR: ' || rec.object_name || ' - ' || SQLERRM);
            END;
        END LOOP;
        DBMS_OUTPUT.PUT_LINE('Total deleted: ' || vDeleted);
    END pDeleteFolder;
BEGIN
    -- Resolve bucket URIs and credential from project configuration.
    vDataBucketUri    := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('DATA');
    vArchiveBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
    vCredentialName   := CT_MRDS.ENV_MANAGER.gvCredentialName;
    DBMS_OUTPUT.PUT_LINE('========================================================================');
    DBMS_OUTPUT.PUT_LINE('MARS-835 Manual Cleanup');
    DBMS_OUTPUT.PUT_LINE('========================================================================');
    DBMS_OUTPUT.PUT_LINE(CHR(10) || '1. Deleting DEBT CSV files from DATA bucket...');
    pDeleteFolder(vDataBucketUri, 'ODS/CSDB/CSDB_DEBT/', 'LEGACY_DEBT%');
    DBMS_OUTPUT.PUT_LINE(CHR(10) || '2. Deleting DEBT_DAILY CSV files from DATA bucket...');
    pDeleteFolder(vDataBucketUri, 'ODS/CSDB/CSDB_DEBT_DAILY/', 'LEGACY_DEBT_DAILY%');
    DBMS_OUTPUT.PUT_LINE(CHR(10) || '3. Deleting DEBT Parquet files from ARCHIVE bucket...');
    pDeleteFolder(vArchiveBucketUri, 'ARCHIVE/CSDB/CSDB_DEBT/');
    DBMS_OUTPUT.PUT_LINE(CHR(10) || '4. Deleting DEBT_DAILY Parquet files from ARCHIVE bucket...');
    pDeleteFolder(vArchiveBucketUri, 'ARCHIVE/CSDB/CSDB_DEBT_DAILY/');
    DBMS_OUTPUT.PUT_LINE(CHR(10) || '========================================================================');
    DBMS_OUTPUT.PUT_LINE('Manual cleanup completed');
    DBMS_OUTPUT.PUT_LINE('========================================================================');
END;
/

View File

@@ -0,0 +1,93 @@
-- MARS-835: Manual cleanup of Parquet files only (after bugfix)
-- Description: Removes orphaned Parquet files from ARCHIVE bucket
-- Usage: Execute as CT_MRDS user
SET SERVEROUTPUT ON SIZE UNLIMITED
DECLARE
    -- MARS-835 post-bugfix cleanup: removes orphaned Parquet files from the
    -- ARCHIVE bucket for CSDB_DEBT and CSDB_DEBT_DAILY. The two identical
    -- cleanup loops are factored into one local procedure.
    vCredentialName VARCHAR2(100) := CT_MRDS.ENV_MANAGER.gvCredentialName;
    vHistBucketUri  VARCHAR2(200) := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
    vStartTime      TIMESTAMP := SYSTIMESTAMP;
    vElapsedTime    INTERVAL DAY TO SECOND;

    -- Deletes every non-directory object under ARCHIVE/CSDB/<pTableName>/ in
    -- the archive bucket. ORA-20404 (object not found) is treated as a skip;
    -- any other error is reported and the loop continues. Prints the same
    -- per-file and per-table summary lines as the previous inline loops.
    PROCEDURE pCleanTableFolder(pTableName IN VARCHAR2) IS
        vDeleted NUMBER := 0;
        vFolder  VARCHAR2(200) := 'ARCHIVE/CSDB/' || pTableName || '/';
    BEGIN
        DBMS_OUTPUT.PUT_LINE(chr(10) || 'Deleting ' || pTableName || ' Parquet files...');
        FOR rec IN (
            SELECT object_name
            FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
                credential_name => vCredentialName,
                location_uri    => vHistBucketUri || vFolder
            ))
            WHERE object_name NOT LIKE '%/' -- Exclude directories
        ) LOOP
            BEGIN
                DBMS_CLOUD.DELETE_OBJECT(
                    credential_name => vCredentialName,
                    object_uri      => vHistBucketUri || vFolder || rec.object_name
                );
                DBMS_OUTPUT.PUT_LINE(' Deleted: ' || rec.object_name);
                vDeleted := vDeleted + 1;
            EXCEPTION
                WHEN OTHERS THEN
                    IF SQLCODE = -20404 THEN
                        DBMS_OUTPUT.PUT_LINE(' Skipped (not found): ' || rec.object_name);
                    ELSE
                        DBMS_OUTPUT.PUT_LINE(' ERROR: ' || SQLERRM || ' - ' || rec.object_name);
                    END IF;
            END;
        END LOOP;
        DBMS_OUTPUT.PUT_LINE(pTableName || ' Parquet files deleted: ' || vDeleted);
    END pCleanTableFolder;
BEGIN
    DBMS_OUTPUT.PUT_LINE('==========================================================');
    DBMS_OUTPUT.PUT_LINE('MANUAL CLEANUP: Parquet files only');
    DBMS_OUTPUT.PUT_LINE('==========================================================');
    DBMS_OUTPUT.PUT_LINE('Start Time: ' || TO_CHAR(vStartTime, 'YYYY-MM-DD HH24:MI:SS'));
    DBMS_OUTPUT.PUT_LINE('Credential: ' || vCredentialName);
    DBMS_OUTPUT.PUT_LINE('Archive Bucket: ' || vHistBucketUri);
    DBMS_OUTPUT.PUT_LINE('----------------------------------------------------------');
    pCleanTableFolder('CSDB_DEBT');
    pCleanTableFolder('CSDB_DEBT_DAILY');
    -- Final summary
    vElapsedTime := SYSTIMESTAMP - vStartTime;
    DBMS_OUTPUT.PUT_LINE('----------------------------------------------------------');
    DBMS_OUTPUT.PUT_LINE('End Time: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS'));
    DBMS_OUTPUT.PUT_LINE('Elapsed Time: ' || vElapsedTime);
    DBMS_OUTPUT.PUT_LINE('==========================================================');
    DBMS_OUTPUT.PUT_LINE('MANUAL CLEANUP COMPLETED');
    DBMS_OUTPUT.PUT_LINE('==========================================================');
END;
/

View File

@@ -59,7 +59,13 @@ PROMPT =========================================================================
PROMPT
PROMPT =========================================================================
PROMPT Step 3: Verify Rollback Completed
PROMPT Step 3: Delete File Registration Records from A_SOURCE_FILE_RECEIVED
PROMPT =========================================================================
@@90_MARS_835_rollback_file_registrations.sql
PROMPT
PROMPT =========================================================================
PROMPT Step 4: Verify Rollback Completed
PROMPT =========================================================================
@@99_MARS_835_verify_rollback.sql

View File

@@ -1,287 +0,0 @@
-- =====================================================================================
-- Script: 00_MARS_956_pre_check_prerequisites.sql
-- Purpose: Verify prerequisites for C2D MPEC data export
-- Author: System Generated
-- Created: 2026-02-12
-- MARS Issue: MARS-956
-- Target Locations: mrds_data_dev/ODS/C2D/
-- =====================================================================================
SET SERVEROUTPUT ON SIZE UNLIMITED;
SET FEEDBACK ON;
SET VERIFY OFF;
SET LINESIZE 200;
PROMPT =====================================================================================
PROMPT MARS-956 Pre-Check: Prerequisites for C2D MPEC Data Export
PROMPT =====================================================================================
DECLARE
    -- MARS-956 pre-flight checks for the C2D MPEC export: package availability,
    -- source/template/external table readiness, ETL key integrity, and target
    -- bucket state. Read-only apart from DBMS_OUTPUT; safe to re-run.
    -- Fixes vs. previous revision: de-duplicated 'ERROR: ERROR:' / 'WARNING:
    -- Warning:' message prefixes and standardized on DBMS_CLOUD.LIST_OBJECTS
    -- (replacing MRDS_LOADER.cloud_wrapper.list_objects) for consistency with
    -- the other MARS scripts.
    vDataBucketUri  VARCHAR2(500);
    vCredentialName VARCHAR2(100);
    vFileCount      NUMBER := 0;   -- existing C2D MPEC files found in the bucket
    vAdminRows      NUMBER := 0;
    vContentRows    NUMBER := 0;
    vCriterionRows  NUMBER := 0;
    vAdminCols      NUMBER := 0;
    vContentCols    NUMBER := 0;
    vCriterionCols  NUMBER := 0;
BEGIN
    -- Get bucket URI and credential from FILE_MANAGER configuration
    vDataBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ODS');
    vCredentialName := CT_MRDS.ENV_MANAGER.gvCredentialName;
    DBMS_OUTPUT.PUT_LINE('CHECK TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
    DBMS_OUTPUT.PUT_LINE('ODS Bucket URI: ' || vDataBucketUri);
    DBMS_OUTPUT.PUT_LINE('Credential: ' || vCredentialName);
    DBMS_OUTPUT.PUT_LINE('');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    DBMS_OUTPUT.PUT_LINE('Check 1: Verify DATA_EXPORTER Package Version');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    -- Check DATA_EXPORTER version; a missing/invalid package aborts the pre-check.
    BEGIN
        DBMS_OUTPUT.PUT_LINE('DATA_EXPORTER Version: ' || CT_MRDS.DATA_EXPORTER.PACKAGE_VERSION);
        DBMS_OUTPUT.PUT_LINE('Build Date: ' || CT_MRDS.DATA_EXPORTER.PACKAGE_BUILD_DATE);
        DBMS_OUTPUT.PUT_LINE('SUCCESS: DATA_EXPORTER package is available');
    EXCEPTION
        WHEN OTHERS THEN
            DBMS_OUTPUT.PUT_LINE('ERROR: DATA_EXPORTER package not available: ' || SQLERRM);
            RAISE;
    END;
    DBMS_OUTPUT.PUT_LINE('');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    DBMS_OUTPUT.PUT_LINE('Check 2: Verify Source Tables in OU_LEGACY_C2D Schema');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    -- Check source table row counts (dynamic SQL so a missing table raises here,
    -- not at compile time of this block).
    EXECUTE IMMEDIATE 'SELECT COUNT(*) FROM OU_LEGACY_C2D.MPEC_ADMIN' INTO vAdminRows;
    EXECUTE IMMEDIATE 'SELECT COUNT(*) FROM OU_LEGACY_C2D.MPEC_CONTENT' INTO vContentRows;
    EXECUTE IMMEDIATE 'SELECT COUNT(*) FROM OU_LEGACY_C2D.MPEC_CONTENT_CRITERION' INTO vCriterionRows;
    DBMS_OUTPUT.PUT_LINE('Source table row counts:');
    DBMS_OUTPUT.PUT_LINE('- MPEC_ADMIN: ' || vAdminRows || ' rows');
    DBMS_OUTPUT.PUT_LINE('- MPEC_CONTENT: ' || vContentRows || ' rows');
    DBMS_OUTPUT.PUT_LINE('- MPEC_CONTENT_CRITERION: ' || vCriterionRows || ' rows');
    IF vAdminRows > 0 AND vContentRows > 0 AND vCriterionRows > 0 THEN
        DBMS_OUTPUT.PUT_LINE('SUCCESS: All source tables have data');
    ELSE
        DBMS_OUTPUT.PUT_LINE('ERROR: One or more source tables are empty');
    END IF;
    DBMS_OUTPUT.PUT_LINE('');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    DBMS_OUTPUT.PUT_LINE('Check 3: Verify Template Tables in CT_ET_TEMPLATES Schema');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    -- Check template table structure (column count 0 => table missing or no access).
    SELECT COUNT(*)
    INTO vAdminCols
    FROM all_tab_columns
    WHERE owner = 'CT_ET_TEMPLATES'
    AND table_name = 'C2D_MPEC_ADMIN';
    SELECT COUNT(*)
    INTO vContentCols
    FROM all_tab_columns
    WHERE owner = 'CT_ET_TEMPLATES'
    AND table_name = 'C2D_MPEC_CONTENT';
    SELECT COUNT(*)
    INTO vCriterionCols
    FROM all_tab_columns
    WHERE owner = 'CT_ET_TEMPLATES'
    AND table_name = 'C2D_MPEC_CONTENT_CRITERION';
    DBMS_OUTPUT.PUT_LINE('Template table column counts:');
    DBMS_OUTPUT.PUT_LINE('- C2D_MPEC_ADMIN: ' || vAdminCols || ' columns');
    DBMS_OUTPUT.PUT_LINE('- C2D_MPEC_CONTENT: ' || vContentCols || ' columns');
    DBMS_OUTPUT.PUT_LINE('- C2D_MPEC_CONTENT_CRITERION: ' || vCriterionCols || ' columns');
    IF vAdminCols > 0 AND vContentCols > 0 AND vCriterionCols > 0 THEN
        DBMS_OUTPUT.PUT_LINE('SUCCESS: All template tables have defined structure');
    ELSE
        DBMS_OUTPUT.PUT_LINE('ERROR: One or more template tables missing columns');
    END IF;
    DBMS_OUTPUT.PUT_LINE('');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    DBMS_OUTPUT.PUT_LINE('Check 4: Verify ETL Key References in A_LOAD_HISTORY');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    -- Check ETL key validation: every distinct A_ETL_LOAD_SET_FK used by the
    -- three source tables must exist in CT_ODS.A_LOAD_HISTORY.
    DECLARE
        vValidKeys NUMBER := 0;
        vTotalSourceKeys NUMBER := 0;
    BEGIN
        -- Count total distinct ETL keys in source tables
        SELECT COUNT(DISTINCT etl_key)
        INTO vTotalSourceKeys
        FROM (
            SELECT A_ETL_LOAD_SET_FK AS etl_key FROM OU_LEGACY_C2D.MPEC_ADMIN
            UNION
            SELECT A_ETL_LOAD_SET_FK FROM OU_LEGACY_C2D.MPEC_CONTENT
            UNION
            SELECT A_ETL_LOAD_SET_FK FROM OU_LEGACY_C2D.MPEC_CONTENT_CRITERION
        );
        -- Count how many exist in A_LOAD_HISTORY
        SELECT COUNT(DISTINCT etl_key)
        INTO vValidKeys
        FROM (
            SELECT A_ETL_LOAD_SET_FK AS etl_key FROM OU_LEGACY_C2D.MPEC_ADMIN
            UNION
            SELECT A_ETL_LOAD_SET_FK FROM OU_LEGACY_C2D.MPEC_CONTENT
            UNION
            SELECT A_ETL_LOAD_SET_FK FROM OU_LEGACY_C2D.MPEC_CONTENT_CRITERION
        ) src
        WHERE EXISTS (
            SELECT 1 FROM CT_ODS.A_LOAD_HISTORY h
            WHERE h.A_ETL_LOAD_SET_KEY = src.etl_key
        );
        DBMS_OUTPUT.PUT_LINE('ETL key validation:');
        DBMS_OUTPUT.PUT_LINE('- Total distinct ETL keys in source: ' || vTotalSourceKeys);
        DBMS_OUTPUT.PUT_LINE('- Valid keys (exist in A_LOAD_HISTORY): ' || vValidKeys);
        IF vValidKeys = vTotalSourceKeys THEN
            DBMS_OUTPUT.PUT_LINE('SUCCESS: All source ETL keys are valid');
        ELSE
            DBMS_OUTPUT.PUT_LINE('ERROR: Some ETL keys may be invalid: ' || (vTotalSourceKeys - vValidKeys));
        END IF;
    END;
    DBMS_OUTPUT.PUT_LINE('');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    DBMS_OUTPUT.PUT_LINE('Check 5: Verify External Tables in ODS Schema (Target Readiness)');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    -- Check target external table accessibility and current record counts.
    -- Convention: count = -1 means a real access error; 0 means empty or no files.
    DECLARE
        vAdminExtCount NUMBER := -1;
        vContentExtCount NUMBER := -1;
        vCriterionExtCount NUMBER := -1;
    BEGIN
        -- Check if external tables exist and are accessible
        BEGIN
            EXECUTE IMMEDIATE 'SELECT COUNT(*) FROM ODS.C2D_MPEC_ADMIN_ODS' INTO vAdminExtCount;
        EXCEPTION
            WHEN OTHERS THEN
                -- Acceptable errors for empty external tables:
                -- ORA-29913: error in executing ODCIEXTTABLEOPEN callout
                -- ORA-29400: data cartridge error
                -- KUP-13023: nothing matched wildcard query (no files)
                IF SQLCODE IN (-29913, -29400) OR SQLERRM LIKE '%KUP-13023%' OR SQLERRM LIKE '%does not exist%' THEN
                    vAdminExtCount := 0; -- Empty/non-existent is OK
                ELSE
                    vAdminExtCount := -1; -- Real error
                END IF;
        END;
        BEGIN
            EXECUTE IMMEDIATE 'SELECT COUNT(*) FROM ODS.C2D_MPEC_CONTENT_ODS' INTO vContentExtCount;
        EXCEPTION
            WHEN OTHERS THEN
                IF SQLCODE IN (-29913, -29400) OR SQLERRM LIKE '%KUP-13023%' OR SQLERRM LIKE '%does not exist%' THEN
                    vContentExtCount := 0;
                ELSE
                    vContentExtCount := -1;
                END IF;
        END;
        BEGIN
            EXECUTE IMMEDIATE 'SELECT COUNT(*) FROM ODS.C2D_MPEC_CONTENT_CRITERION_ODS' INTO vCriterionExtCount;
        EXCEPTION
            WHEN OTHERS THEN
                IF SQLCODE IN (-29913, -29400) OR SQLERRM LIKE '%KUP-13023%' OR SQLERRM LIKE '%does not exist%' THEN
                    vCriterionExtCount := 0;
                ELSE
                    vCriterionExtCount := -1;
                END IF;
        END;
        DBMS_OUTPUT.PUT_LINE('Target external table current counts:');
        DBMS_OUTPUT.PUT_LINE('- ODS.C2D_MPEC_ADMIN_ODS: ' ||
            CASE WHEN vAdminExtCount = -1 THEN 'ERROR/INACCESSIBLE'
                 WHEN vAdminExtCount = 0 THEN '0 (empty/clean)'
                 ELSE TO_CHAR(vAdminExtCount) END);
        DBMS_OUTPUT.PUT_LINE('- ODS.C2D_MPEC_CONTENT_ODS: ' ||
            CASE WHEN vContentExtCount = -1 THEN 'ERROR/INACCESSIBLE'
                 WHEN vContentExtCount = 0 THEN '0 (empty/clean)'
                 ELSE TO_CHAR(vContentExtCount) END);
        DBMS_OUTPUT.PUT_LINE('- ODS.C2D_MPEC_CONTENT_CRITERION_ODS: ' ||
            CASE WHEN vCriterionExtCount = -1 THEN 'ERROR/INACCESSIBLE'
                 WHEN vCriterionExtCount = 0 THEN '0 (empty/clean)'
                 ELSE TO_CHAR(vCriterionExtCount) END);
        IF vAdminExtCount >= 0 AND vContentExtCount >= 0 AND vCriterionExtCount >= 0 THEN
            IF vAdminExtCount = 0 AND vContentExtCount = 0 AND vCriterionExtCount = 0 THEN
                DBMS_OUTPUT.PUT_LINE('SUCCESS: All target external tables are clean (ready for fresh export)');
            ELSE
                DBMS_OUTPUT.PUT_LINE('WARNING: Target external tables contain data (' ||
                    (vAdminExtCount + vContentExtCount + vCriterionExtCount) || ' total records)');
                DBMS_OUTPUT.PUT_LINE(' Consider rollback if this is a re-run');
            END IF;
        ELSE
            DBMS_OUTPUT.PUT_LINE('ERROR: Some external tables are inaccessible - check table definitions');
        END IF;
    END;
    DBMS_OUTPUT.PUT_LINE('');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    DBMS_OUTPUT.PUT_LINE('Check 6: Check Existing Files in ODS/C2D Bucket');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    -- Check for existing C2D MPEC files (DBMS_CLOUD.LIST_OBJECTS, consistent
    -- with the other MARS scripts in this repository).
    BEGIN
        FOR rec IN (
            SELECT object_name
            FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
                credential_name => vCredentialName,
                location_uri => vDataBucketUri
            ))
            WHERE object_name LIKE 'ODS/C2D/C2D_MPEC_%'
        ) LOOP
            vFileCount := vFileCount + 1;
            IF vFileCount = 1 THEN
                DBMS_OUTPUT.PUT_LINE('Existing C2D MPEC files in ODS bucket:');
            END IF;
            DBMS_OUTPUT.PUT_LINE('- ' || rec.object_name);
        END LOOP;
        IF vFileCount = 0 THEN
            DBMS_OUTPUT.PUT_LINE('SUCCESS: No existing C2D MPEC files found - ready for clean export');
        ELSE
            DBMS_OUTPUT.PUT_LINE('WARNING: ' || vFileCount || ' existing C2D MPEC files found');
            DBMS_OUTPUT.PUT_LINE(' Consider rollback if this is a re-run');
        END IF;
    EXCEPTION
        WHEN OTHERS THEN
            DBMS_OUTPUT.PUT_LINE('ERROR: Failed to check existing files: ' || SQLERRM);
    END;
    DBMS_OUTPUT.PUT_LINE('');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    DBMS_OUTPUT.PUT_LINE('MARS-956 Pre-Check Summary');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    -- NOTE(review): the summary below repeats SUCCESS unconditionally even when
    -- individual checks above printed ERROR lines; the per-check output is the
    -- authoritative result. Consider tracking per-check flags - confirm intent.
    DBMS_OUTPUT.PUT_LINE('SUCCESS: DATA_EXPORTER package available');
    DBMS_OUTPUT.PUT_LINE('SUCCESS: Source tables: ' || (vAdminRows + vContentRows + vCriterionRows) || ' total rows');
    DBMS_OUTPUT.PUT_LINE('SUCCESS: Template tables: All structures defined');
    DBMS_OUTPUT.PUT_LINE('SUCCESS: ETL keys: All validated in A_LOAD_HISTORY');
    DBMS_OUTPUT.PUT_LINE('SUCCESS: External tables: Accessible and ready');
    IF vFileCount > 0 THEN
        DBMS_OUTPUT.PUT_LINE('WARNING: Existing files: ' || vFileCount || ' (consider rollback)');
    ELSE
        DBMS_OUTPUT.PUT_LINE('SUCCESS: Target bucket: Clean for export');
    END IF;
    DBMS_OUTPUT.PUT_LINE('');
    DBMS_OUTPUT.PUT_LINE('Prerequisites check completed - ready to proceed with MARS-956 export');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
END;
/

View File

@@ -1,7 +1,7 @@
-- =====================================================================================
-- Script: 01_MARS_956_export_c2d_mpec_data.sql
-- Purpose: Export C2D MPEC historical data to ODS bucket
-- Author: System Generated
-- Author: Grzegorz Michalski
-- Created: 2026-02-12
-- MARS Issue: MARS-956
-- Target: mrds_data_dev/ODS/C2D/
@@ -43,7 +43,7 @@ BEGIN
-- Count existing files
SELECT COUNT(*)
INTO vFileCount
FROM TABLE(MRDS_LOADER.cloud_wrapper.list_objects(
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
credential_name => 'OCI$RESOURCE_PRINCIPAL',
location_uri => vLocationUri
))
@@ -61,7 +61,7 @@ BEGIN
DBMS_OUTPUT.PUT_LINE('Existing files:');
FOR rec IN (
SELECT object_name, bytes, TO_CHAR(last_modified, 'YYYY-MM-DD HH24:MI:SS') AS modified
FROM TABLE(MRDS_LOADER.cloud_wrapper.list_objects(
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
credential_name => 'OCI$RESOURCE_PRINCIPAL',
location_uri => vLocationUri
))
@@ -106,7 +106,7 @@ BEGIN
SELECT COUNT(*)
INTO vFileCount
FROM TABLE(MRDS_LOADER.cloud_wrapper.list_objects(
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
credential_name => 'OCI$RESOURCE_PRINCIPAL',
location_uri => vLocationUri
))
@@ -123,7 +123,7 @@ BEGIN
DBMS_OUTPUT.PUT_LINE('Existing files:');
FOR rec IN (
SELECT object_name, bytes, TO_CHAR(last_modified, 'YYYY-MM-DD HH24:MI:SS') AS modified
FROM TABLE(MRDS_LOADER.cloud_wrapper.list_objects(
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
credential_name => 'OCI$RESOURCE_PRINCIPAL',
location_uri => vLocationUri
))
@@ -167,7 +167,7 @@ BEGIN
SELECT COUNT(*)
INTO vFileCount
FROM TABLE(MRDS_LOADER.cloud_wrapper.list_objects(
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
credential_name => 'OCI$RESOURCE_PRINCIPAL',
location_uri => vLocationUri
))
@@ -184,7 +184,7 @@ BEGIN
DBMS_OUTPUT.PUT_LINE('Existing files:');
FOR rec IN (
SELECT object_name, bytes, TO_CHAR(last_modified, 'YYYY-MM-DD HH24:MI:SS') AS modified
FROM TABLE(MRDS_LOADER.cloud_wrapper.list_objects(
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
credential_name => 'OCI$RESOURCE_PRINCIPAL',
location_uri => vLocationUri
))

View File

@@ -2,7 +2,7 @@
-- MARS-956 Verify Exports: Check Export Results and File Creation
-- ===================================================================
-- Purpose: Verify that C2D MPEC export completed successfully
-- Author: System Generated
-- Author: Grzegorz Michalski
-- Date: 2026-02-12
SET SERVEROUTPUT ON SIZE UNLIMITED
@@ -112,7 +112,7 @@ BEGIN
BEGIN
FOR rec IN (
SELECT object_name
FROM TABLE(MRDS_LOADER.cloud_wrapper.list_objects(
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
credential_name => vCredentialName,
location_uri => vDataBucketUri
))

View File

@@ -2,7 +2,7 @@
-- MARS-956 Verify Data Integrity: Source vs Exported Data Validation
-- ===================================================================
-- Purpose: Verify data integrity between source tables and exported files
-- Author: System Generated
-- Author: Grzegorz Michalski
-- Date: 2026-02-12
SET SERVEROUTPUT ON SIZE UNLIMITED

View File

@@ -3,7 +3,7 @@
--=============================================================================================================================
-- Purpose: Delete exported CSV files from ODS/C2D bucket folders for MPEC tables
-- WARNING: This will permanently delete exported data files!
-- Author: System Generated
-- Author: Grzegorz Michalski
-- Date: 2026-02-12
-- Related: MARS-956 - C2D MPEC Data Export Rollback
--=============================================================================================================================

View File

@@ -2,7 +2,7 @@
-- MARS-956 Rollback Step 1: Delete File Registrations
-- ===================================================================
-- Purpose: Remove MARS-956 export file registrations from A_SOURCE_FILE_RECEIVED
-- Author: System Generated
-- Author: Grzegorz Michalski
-- Date: 2026-02-12
SET SERVEROUTPUT ON SIZE UNLIMITED

View File

@@ -2,7 +2,7 @@
-- MARS-956 Rollback Step 2: Clean Process Logs
-- ===================================================================
-- Purpose: Remove MARS-956 process logs from A_PROCESS_LOG
-- Author: System Generated
-- Author: Grzegorz Michalski
-- Date: 2026-02-12
SET SERVEROUTPUT ON SIZE UNLIMITED

View File

@@ -2,7 +2,7 @@
-- MARS-956 Rollback Verification: Confirm Rollback Completion
-- ===================================================================
-- Purpose: Verify that MARS-956 rollback completed successfully
-- Author: System Generated
-- Author: Grzegorz Michalski
-- Date: 2026-02-12
SET SERVEROUTPUT ON SIZE UNLIMITED
@@ -92,7 +92,7 @@ BEGIN
BEGIN
FOR rec IN (
SELECT object_name
FROM TABLE(MRDS_LOADER.cloud_wrapper.list_objects(
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
credential_name => vCredentialName,
location_uri => vDataBucketUri
))

View File

@@ -4,9 +4,8 @@
-- Purpose: One-time bulk export of 3 C2D MPEC tables from OU_LEGACY_C2D schema
-- to OCI buckets (ODS bucket CSV format)
-- Uses DATA_EXPORTER v2.7.5 with pRegisterExport for file registration
-- Author: System Generated
-- Author: Grzegorz Michalski
-- Date: 2026-02-12
-- Version: 1.0.0
-- Dynamic spool file generation (using SYS_CONTEXT - no DBA privileges required)
-- Log files are automatically created in log/ subdirectory
@@ -58,12 +57,6 @@ END;
/
WHENEVER SQLERROR CONTINUE
PROMPT
PROMPT =========================================================================
PROMPT Pre-Check: Verify prerequisites and table readiness
PROMPT =========================================================================
@@00_MARS_956_pre_check_prerequisites.sql
PROMPT
PROMPT =========================================================================
PROMPT Step 1: Export C2D MPEC Data to ODS Bucket

View File

@@ -3,7 +3,7 @@
-- ===================================================================
-- Purpose: Rollback MARS-956 - Delete exported CSV files and file registrations
-- WARNING: This will DELETE all exported data files and registrations!
-- Author: System Generated
-- Author: Grzegorz Michalski
-- Date: 2026-02-12
-- Dynamic spool file generation (using SYS_CONTEXT - no DBA privileges required)