Init
This commit is contained in:
0
MARS_Packages/REL01_POST_DEACTIVATION/.gitkeep
Normal file
0
MARS_Packages/REL01_POST_DEACTIVATION/.gitkeep
Normal file
25
MARS_Packages/REL01_POST_DEACTIVATION/MARS-826-PREHOOK/.gitignore
vendored
Normal file
25
MARS_Packages/REL01_POST_DEACTIVATION/MARS-826-PREHOOK/.gitignore
vendored
Normal file
@@ -0,0 +1,25 @@
|
||||
# MARS-826-PREHOOK Package - Git Ignore Rules
|
||||
# Standard exclusions for MARS deployment packages
|
||||
|
||||
# Confluence documentation (generated, not source)
|
||||
confluence/
|
||||
|
||||
# Log files from SPOOL operations
|
||||
log/
|
||||
*.log
|
||||
|
||||
# Test directories and files
|
||||
test/
|
||||
*_test.sql
|
||||
|
||||
# Mock data scripts (development only)
|
||||
mock_data/
|
||||
|
||||
# Temporary files
|
||||
*.tmp
|
||||
*.bak
|
||||
*~
|
||||
|
||||
# OS-specific files
|
||||
.DS_Store
|
||||
Thumbs.db
|
||||
@@ -0,0 +1,25 @@
|
||||
-- ============================================================================
-- MARS-826-PREHOOK Installation Script 00: DATA_EXPORTER Package Specification Update
-- ============================================================================
-- Purpose: Deploy updated DATA_EXPORTER package specification with version 2.1.1
-- Schema: CT_MRDS
-- Object: PACKAGE SPECIFICATION DATA_EXPORTER
-- ============================================================================

SET SERVEROUTPUT ON SIZE UNLIMITED

PROMPT
PROMPT ============================================================================
PROMPT MARS-826-PREHOOK: Installing CT_MRDS.DATA_EXPORTER Package Specification
PROMPT ============================================================================
PROMPT Package: CT_MRDS.DATA_EXPORTER (SPECIFICATION)
PROMPT Version: 2.1.0 -> 2.1.1 (PATCH)
PROMPT Change: Updated package version and build date
PROMPT ============================================================================

-- Deploy package specification from new_version folder.
-- NOTE: forward slash (not backslash) so the @@ relative path resolves under
-- SQL*Plus on both Windows and Unix/Linux clients.
@@new_version/DATA_EXPORTER.pkg

PROMPT
PROMPT Package specification deployment completed.
PROMPT
||||
@@ -0,0 +1,25 @@
|
||||
-- ============================================================================
-- MARS-826-PREHOOK Installation Script 01: DATA_EXPORTER Package Body Update
-- ============================================================================
-- Purpose: Deploy updated DATA_EXPORTER package with A_ETL_LOAD_SET_KEY support
-- Schema: CT_MRDS
-- Object: PACKAGE BODY DATA_EXPORTER
-- ============================================================================

SET SERVEROUTPUT ON SIZE UNLIMITED

PROMPT
PROMPT ============================================================================
PROMPT MARS-826-PREHOOK: Installing CT_MRDS.DATA_EXPORTER Package Body
PROMPT ============================================================================
PROMPT Package: CT_MRDS.DATA_EXPORTER (BODY)
PROMPT Change: Updated JOIN column reference from A_WORKFLOW_HISTORY_KEY to A_ETL_LOAD_SET_KEY
PROMPT Impact: Fixes export procedures to work with renamed column in CT_ODS.A_LOAD_HISTORY
PROMPT ============================================================================

-- Deploy package body from new_version folder.
-- NOTE: forward slash (not backslash) so the @@ relative path resolves under
-- SQL*Plus on both Windows and Unix/Linux clients.
@@new_version/DATA_EXPORTER.pkb

PROMPT
PROMPT Package deployment completed.
PROMPT
||||
@@ -0,0 +1,25 @@
|
||||
-- ============================================================================
-- MARS-826-PREHOOK Rollback Script 92: DATA_EXPORTER Package Specification Rollback
-- ============================================================================
-- Purpose: Restore previous DATA_EXPORTER package specification (v2.1.0)
-- Schema: CT_MRDS
-- Object: PACKAGE SPECIFICATION DATA_EXPORTER
-- ============================================================================

SET SERVEROUTPUT ON SIZE UNLIMITED

PROMPT
PROMPT ============================================================================
PROMPT MARS-826-PREHOOK: Rolling Back CT_MRDS.DATA_EXPORTER Package Specification
PROMPT ============================================================================
PROMPT Package: CT_MRDS.DATA_EXPORTER (SPECIFICATION)
PROMPT Version: 2.1.1 -> 2.1.0 (Rollback)
PROMPT Source: current_version/DATA_EXPORTER.pkg (MARS-846)
PROMPT ============================================================================

-- Deploy previous package specification from current_version folder (v2.1.0).
-- NOTE: forward slash (not backslash) so the @@ relative path resolves under
-- SQL*Plus on both Windows and Unix/Linux clients.
@@current_version/DATA_EXPORTER.pkg

PROMPT
PROMPT Package specification rollback completed.
PROMPT
||||
@@ -0,0 +1,27 @@
|
||||
-- ============================================================================
-- MARS-826-PREHOOK Rollback Script 91: Restore Previous DATA_EXPORTER Version
-- ============================================================================
-- Purpose: Rollback DATA_EXPORTER package to version with A_WORKFLOW_HISTORY_KEY
-- Schema: CT_MRDS
-- Object: PACKAGE BODY DATA_EXPORTER
-- WARNING: This will restore package to state BEFORE A_ETL_LOAD_SET_KEY support
-- ============================================================================

SET SERVEROUTPUT ON SIZE UNLIMITED

PROMPT
PROMPT ============================================================================
PROMPT MARS-826-PREHOOK ROLLBACK: Restoring Previous DATA_EXPORTER Package Body
PROMPT ============================================================================
PROMPT WARNING: This will restore package to use A_WORKFLOW_HISTORY_KEY
PROMPT This rollback is only valid if CT_ODS.A_LOAD_HISTORY column has been
PROMPT reverted to A_WORKFLOW_HISTORY_KEY (not A_ETL_LOAD_SET_KEY)
PROMPT ============================================================================

-- Deploy previous package body from current_version folder (v2.1.0).
-- NOTE: forward slash (not backslash) so the @@ relative path resolves under
-- SQL*Plus on both Windows and Unix/Linux clients.
@@current_version/DATA_EXPORTER.pkb

PROMPT
PROMPT Package rollback completed.
PROMPT Previous version restored (with A_WORKFLOW_HISTORY_KEY references).
PROMPT
||||
223
MARS_Packages/REL01_POST_DEACTIVATION/MARS-826-PREHOOK/README.md
Normal file
223
MARS_Packages/REL01_POST_DEACTIVATION/MARS-826-PREHOOK/README.md
Normal file
@@ -0,0 +1,223 @@
|
||||
# MARS-826-PREHOOK: DATA_EXPORTER Package Update - Column Rename Support
|
||||
|
||||
## Overview
|
||||
**Purpose**: Update DATA_EXPORTER package to support renamed column in CT_ODS.A_LOAD_HISTORY
|
||||
**Type**: Pre-Hook Deployment (Required before MARS-826)
|
||||
**Target Schema**: CT_MRDS
|
||||
**Database Objects**: DATA_EXPORTER package body
|
||||
|
||||
## Background
|
||||
|
||||
### Issue
|
||||
The CT_ODS.A_LOAD_HISTORY table has column `A_ETL_LOAD_SET_KEY` (PRIMARY KEY), but the DATA_EXPORTER package had hardcoded references to the old column name `A_WORKFLOW_HISTORY_KEY`, causing JOIN failures.
|
||||
|
||||
### Root Cause
|
||||
Column name mismatch discovered during MARS-826 testing:
|
||||
- **Database DDL**: `A_ETL_LOAD_SET_KEY` (correct, as per source definition)
|
||||
- **DATA_EXPORTER package**: `L.A_WORKFLOW_HISTORY_KEY` (incorrect, outdated reference)
|
||||
- **Impact**: All export procedures failed with `ORA-00904: "L"."A_WORKFLOW_HISTORY_KEY": invalid identifier`
|
||||
|
||||
### Solution
|
||||
Updated DATA_EXPORTER.pkb to use correct column name `A_ETL_LOAD_SET_KEY` in all JOIN operations with CT_ODS.A_LOAD_HISTORY.
|
||||
|
||||
## Changes Made
|
||||
|
||||
### Package Version Update
|
||||
**Previous Version**: 2.1.0
|
||||
**New Version**: 2.1.1
|
||||
**Change Type**: PATCH (Bug Fix)
|
||||
**Build Date**: 2025-12-04 13:10:00
|
||||
|
||||
### Modified Files
|
||||
1. **DATA_EXPORTER.pkg** (Package Specification)
|
||||
- Updated PACKAGE_VERSION: '2.1.0' → '2.1.1'
|
||||
- Updated PACKAGE_BUILD_DATE: '2025-12-04 13:10:00'
|
||||
- Updated VERSION_HISTORY with v2.1.1 entry
|
||||
|
||||
2. **DATA_EXPORTER.pkb** (Package Body)
|
||||
- Updated 4 JOIN clauses in dynamic SQL
|
||||
- Updated example in documentation comment
|
||||
|
||||
### Specific Changes
|
||||
|
||||
**Location 1**: Line ~326 (EXPORT_TABLE_DATA_BY_DATE - partition query)
|
||||
```sql
|
||||
-- BEFORE:
|
||||
WHERE T.' || ... || ' = L.A_WORKFLOW_HISTORY_KEY
|
||||
|
||||
-- AFTER:
|
||||
WHERE T.' || ... || ' = L.A_ETL_LOAD_SET_KEY
|
||||
```
|
||||
|
||||
**Location 2**: Line ~340 (EXPORT_TABLE_DATA_BY_DATE - export query)
|
||||
```sql
|
||||
-- BEFORE:
|
||||
WHERE T.' || ... || ' = L.A_WORKFLOW_HISTORY_KEY
|
||||
|
||||
-- AFTER:
|
||||
WHERE T.' || ... || ' = L.A_ETL_LOAD_SET_KEY
|
||||
```
|
||||
|
||||
**Location 3**: Line ~613 (EXPORT_TABLE_DATA_TO_CSV_BY_DATE - partition query)
|
||||
```sql
|
||||
-- BEFORE:
|
||||
WHERE T.' || ... || ' = L.A_WORKFLOW_HISTORY_KEY
|
||||
|
||||
-- AFTER:
|
||||
WHERE T.' || ... || ' = L.A_ETL_LOAD_SET_KEY
|
||||
```
|
||||
|
||||
**Location 4**: Line ~629 (EXPORT_TABLE_DATA_TO_CSV_BY_DATE - export query)
|
||||
```sql
|
||||
-- BEFORE:
|
||||
WHERE T.' || ... || ' = L.A_WORKFLOW_HISTORY_KEY
|
||||
|
||||
-- AFTER:
|
||||
WHERE T.' || ... || ' = L.A_ETL_LOAD_SET_KEY
|
||||
```
|
||||
|
||||
**Location 5**: Line ~398 (Documentation example)
|
||||
```sql
|
||||
-- BEFORE:
|
||||
pKeyColumnName => 'A_WORKFLOW_HISTORY_KEY',
|
||||
|
||||
-- AFTER:
|
||||
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
||||
```
|
||||
|
||||
## Deployment
|
||||
|
||||
### Prerequisites
|
||||
- Database connection: CT_MRDS schema
|
||||
- Required privileges: ALTER PACKAGE on CT_MRDS.DATA_EXPORTER
|
||||
- Estimated time: 1-2 minutes
|
||||
|
||||
### Installation Steps
|
||||
|
||||
The master installation script performs these operations in sequence:

1. **Deploy Package Specification** - Install updated DATA_EXPORTER.pkg from new_version/ (v2.1.1)
2. **Deploy Package Body** - Install updated DATA_EXPORTER.pkb from new_version/ with column reference fixes
3. **Verify Package** - Check package compilation status and test export (02_MARS_826_PREHOOK_verify_package.sql)
4. **Track Version** - Record package version using universal tracking script (Standard)
5. **Verify Status** - Check all tracked packages for untracked changes using universal verification script (Standard)
|
||||
|
||||
**Version Folders:**
|
||||
- **current_version/** - Contains DATA_EXPORTER v2.1.0 (backup from MARS-846 for rollback)
|
||||
- **new_version/** - Contains DATA_EXPORTER v2.1.1 (updated with A_ETL_LOAD_SET_KEY fixes)
|
||||
|
||||
### Execution
|
||||
1. Connect to database as CT_MRDS user
|
||||
2. Execute `install_mars826_prehook.sql`
|
||||
3. Verify package compilation status
|
||||
4. Test export functionality
|
||||
|
||||
### Verification
|
||||
```sql
|
||||
-- Check package compilation status
|
||||
SELECT object_name, object_type, status
|
||||
FROM user_objects
|
||||
WHERE object_name = 'DATA_EXPORTER'
|
||||
AND object_type = 'PACKAGE BODY';
|
||||
|
||||
-- Verify package version
|
||||
SELECT CT_MRDS.DATA_EXPORTER.GET_VERSION() FROM DUAL;
|
||||
|
||||
-- Test export with new column reference
|
||||
BEGIN
|
||||
CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
|
||||
pSchemaName => 'OU_LM',
|
||||
pTableName => 'ADHOC_ADJ_HEADER',
|
||||
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
||||
pBucketArea => 'ARCHIVE',
|
||||
pFolderName => 'TEST_EXPORT'
|
||||
);
|
||||
END;
|
||||
/
|
||||
```
|
||||
|
||||
### Rollback
|
||||
Execute `rollback_mars826_prehook.sql` to restore previous package version.
|
||||
|
||||
## Dependencies
|
||||
|
||||
### Required by MARS-826
|
||||
**CRITICAL**: This package must be deployed **BEFORE** MARS-826 execution. MARS-826 export scripts depend on this updated DATA_EXPORTER package.
|
||||
|
||||
### Database Objects
|
||||
- **Table**: CT_ODS.A_LOAD_HISTORY (must have column A_ETL_LOAD_SET_KEY)
|
||||
- **Package**: CT_MRDS.ENV_MANAGER (for error handling and logging)
|
||||
- **Package**: CT_MRDS.FILE_MANAGER (for bucket URI resolution)
|
||||
|
||||
## Configuration Changes
|
||||
|
||||
### Bucket Configuration
|
||||
**Note**: During testing, discovered bucket configuration issue:
|
||||
- Original: `ArchiveBucketName = 'archive'` (bucket did not exist)
|
||||
- Fixed: `ArchiveBucketName = 'history'` (correct bucket name)
|
||||
|
||||
This configuration fix is **NOT** part of this MARS package but was applied directly to database:
|
||||
```sql
|
||||
UPDATE CT_MRDS.A_FILE_MANAGER_CONFIG
|
||||
SET CONFIG_VARIABLE_VALUE = 'history'
|
||||
WHERE CONFIG_VARIABLE = 'ArchiveBucketName';
|
||||
COMMIT;
|
||||
```
|
||||
|
||||
## Testing Results
|
||||
|
||||
### Test Case 1: Single Table Export
|
||||
**Table**: OU_LM.ADHOC_ADJ_HEADER
|
||||
**Result**: ✅ SUCCESS
|
||||
**Output**: 2 Parquet files created with Hive-style partitioning
|
||||
```
|
||||
LM_ADHOC_ADJUSTMENTS_HEADER/PARTITION_YEAR=2025/PARTITION_MONTH=08/202508_1_*.parquet (1,433 bytes)
|
||||
LM_ADHOC_ADJUSTMENTS_HEADER/PARTITION_YEAR=2025/PARTITION_MONTH=09/202509_1_*.parquet (1,432 bytes)
|
||||
```
|
||||
|
||||
### Test Case 2: Package Compilation
|
||||
**Status**: ✅ VALID
|
||||
**Compilation Time**: < 1 second
|
||||
**Errors**: None
|
||||
|
||||
## Impact Analysis
|
||||
|
||||
### Affected Procedures
|
||||
1. `EXPORT_TABLE_DATA_BY_DATE` - Main export with Parquet partitioning
|
||||
2. `EXPORT_TABLE_DATA_TO_CSV_BY_DATE` - CSV export with date filtering
|
||||
|
||||
### Tables Using A_ETL_LOAD_SET_KEY_FK
|
||||
All tables in MARS-826 export scope (19 tables):
|
||||
- OU_LM: 17 tables (ADHOC_ADJ_*, BALANCESHEET_*, CSM_ADJ_*, FORECAST_*, QR_ADJ_*, STANDING_FACILITY*, TTS_*)
|
||||
- OU_MRR: 2 tables (IND_CURRENT_ACCOUNT, IND_OVERNIGHT_DEPOSITS)
|
||||
|
||||
### Backward Compatibility
|
||||
⚠️ **BREAKING CHANGE**: This update is NOT backward compatible with databases where A_LOAD_HISTORY still uses `A_WORKFLOW_HISTORY_KEY` column name.
|
||||
|
||||
## Files Included
|
||||
|
||||
1. **README.md** - This documentation file
|
||||
2. **.gitignore** - Git exclusions (confluence/, log/, test/, mock_data/)
|
||||
3. **install_mars826_prehook.sql** - Master installation script with SPOOL logging (5 steps)
|
||||
4. **rollback_mars826_prehook.sql** - Master rollback script (2 steps)
|
||||
5. **00_MARS_826_PREHOOK_install_DATA_EXPORTER_SPEC.sql** - Deploy updated package specification
|
||||
6. **01_MARS_826_PREHOOK_install_DATA_EXPORTER_BODY.sql** - Deploy updated package body
|
||||
7. **02_MARS_826_PREHOOK_verify_package.sql** - Verify package compilation and test export
|
||||
8. **track_package_versions.sql** - Universal version tracking script (Standard)
|
||||
9. **verify_packages_version.sql** - Universal package verification script (Standard)
|
||||
10. **91_MARS_826_PREHOOK_rollback_DATA_EXPORTER_BODY.sql** - Restore previous package body
|
||||
11. **92_MARS_826_PREHOOK_rollback_DATA_EXPORTER_SPEC.sql** - Restore previous package specification
|
||||
12. **current_version/** - Backup of DATA_EXPORTER v2.1.0 (from MARS-846)
|
||||
13. **new_version/** - Updated DATA_EXPORTER v2.1.1 (with A_ETL_LOAD_SET_KEY fixes)
|
||||
|
||||
## Version History
|
||||
- **v2.1.1** (2025-12-04): Fixed JOIN column reference A_WORKFLOW_HISTORY_KEY → A_ETL_LOAD_SET_KEY
|
||||
- **v2.1.0** (2025-10-22): Added version tracking and PARTITION_YEAR/PARTITION_MONTH support
|
||||
- **v2.0.0** (2025-10-01): Separated export functionality from FILE_MANAGER package
|
||||
|
||||
## Related JIRA Issues
|
||||
- **MARS-826**: Export CSDB historical data to HIST bucket (requires this pre-hook)
|
||||
|
||||
## Author
|
||||
Created by: Grzegorz Michalski
|
||||
Date: 2025-12-04
|
||||
Schema: CT_MRDS
|
||||
@@ -0,0 +1,708 @@
|
||||
create or replace PACKAGE BODY CT_MRDS.DATA_EXPORTER
|
||||
AS
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
----------------------------------------------------------------------------------------------------
-- EXPORT_TABLE_DATA
-- Exports one CSV file per distinct value of a key column to OCI Object Storage
-- via DBMS_CLOUD.EXPORT_DATA.
--
-- Parameters:
--   pSchemaName     - owner of the source table (matched against ALL_TABLES.OWNER)
--   pTableName      - source table name
--   pKeyColumnName  - column whose distinct values drive one export file each
--   pBucketArea     - logical bucket area, resolved to a URI by FILE_MANAGER.GET_BUCKET_URI
--   pFolderName     - optional sub-folder inside the bucket (NULL = bucket root)
--   pCredentialName - DBMS_CLOUD credential; defaults to ENV_MANAGER.gvCredentialName
--
-- Raises (via RAISE_APPLICATION_ERROR with ENV_MANAGER codes):
--   CODE_TABLE_NOT_EXISTS / CODE_COLUMN_NOT_EXISTS / CODE_UNSUPPORTED_DATA_TYPE / CODE_UNKNOWN
--
-- NOTE(review): vgMsgTmp used in the exception handlers is not declared here;
-- presumably a package-level variable — confirm against the package body header.
----------------------------------------------------------------------------------------------------
PROCEDURE EXPORT_TABLE_DATA (
    pSchemaName     IN VARCHAR2,
    pTableName      IN VARCHAR2,
    pKeyColumnName  IN VARCHAR2,
    pBucketArea     IN VARCHAR2,
    pFolderName     IN VARCHAR2,
    pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
)
IS
    -- Type definition for key values
    TYPE key_value_tab IS TABLE OF VARCHAR2(4000);
    vKeyValues      key_value_tab;       -- distinct values of the key column
    vCount          INTEGER;             -- existence-check counter
    vSql            VARCHAR2(4000);      -- dynamic SELECT DISTINCT statement
    vKeyValue       VARCHAR2(4000);      -- current key value in the loop
    vQuery          VARCHAR2(32767);     -- per-key export query passed to DBMS_CLOUD
    vUri            VARCHAR2(4000);      -- target object-storage URI
    vDataType       VARCHAR2(30);        -- data type of the key column (drives literal quoting)
    vTableName      VARCHAR2(128);       -- upper-cased, later becomes SCHEMA.TABLE
    vSchemaName     VARCHAR2(128);
    vKeyColumnName  VARCHAR2(128);
    vParameters     VARCHAR2(4000);      -- formatted call parameters for logging
    vBucketUri      VARCHAR2(4000);      -- base URI resolved from pBucketArea


    -- Function to sanitize file names
    FUNCTION sanitizeFilename(pFilename IN VARCHAR2) RETURN VARCHAR2 IS
        vFilename VARCHAR2(1000);
    BEGIN
        -- Replace any disallowed characters with underscores
        vFilename := REGEXP_REPLACE(pFilename, '[^a-zA-Z0-9._-]', '_');
        RETURN vFilename;
    END sanitizeFilename;

BEGIN
    -- Capture the full parameter list once so every log/error entry carries context.
    vParameters := ENV_MANAGER.FORMAT_PARAMETERS(SYS.ODCIVARCHAR2LIST( 'pSchemaName => '''||nvl(pSchemaName, 'NULL')||''''
                                                                      ,'pTableName => '''||nvl(pTableName, 'NULL')||''''
                                                                      ,'pKeyColumnName => '''||nvl(pKeyColumnName, 'NULL')||''''
                                                                      ,'pBucketArea => '''||nvl(pBucketArea, 'NULL')||''''
                                                                      ,'pFolderName => '''||nvl(pFolderName, 'NULL')||''''
                                                                      ,'pCredentialName => '''||nvl(pCredentialName, 'NULL')||''''
                                                                      ));
    ENV_MANAGER.LOG_PROCESS_EVENT('Start','INFO', vParameters);

    -- Get bucket URI based on bucket area using FILE_MANAGER function
    vBucketUri := FILE_MANAGER.GET_BUCKET_URI(pBucketArea);

    -- Convert table and column names to uppercase to match data dictionary
    vTableName     := UPPER(pTableName);
    vSchemaName    := UPPER(pSchemaName);
    vKeyColumnName := UPPER(pKeyColumnName);

    -- Check if table exists
    SELECT COUNT(*) INTO vCount
    FROM all_tables
    WHERE table_name = vTableName
      AND owner = vSchemaName;

    IF vCount = 0 THEN
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_TABLE_NOT_EXISTS, ENV_MANAGER.MSG_TABLE_NOT_EXISTS);
    END IF;

    -- Check if key column exists
    SELECT COUNT(*) INTO vCount
    FROM all_tab_columns
    WHERE table_name = vTableName
      AND column_name = vKeyColumnName
      AND owner = vSchemaName;

    IF vCount = 0 THEN
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, ENV_MANAGER.MSG_COLUMN_NOT_EXISTS);

    END IF;

    -- Get the data type of the key column
    SELECT data_type INTO vDataType
    FROM all_tab_columns
    WHERE table_name = vTableName
      AND column_name = vKeyColumnName
      AND owner = vSchemaName;

    -- DBMS_ASSERT guards the identifiers used in the dynamic SQL below against injection.
    vTableName := DBMS_ASSERT.SCHEMA_NAME(vSchemaName) || '.' || DBMS_ASSERT.simple_sql_name(vTableName);
    -- Fetch unique key values
    vSql := 'SELECT DISTINCT ' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) ||
            ' FROM ' || vTableName;
    EXECUTE IMMEDIATE vSql BULK COLLECT INTO vKeyValues;

    -- Loop over each unique key value
    FOR i IN 1 .. vKeyValues.COUNT LOOP
        vKeyValue := vKeyValues(i);

        -- Construct the query to extract data for the current key value.
        -- Literal quoting depends on the key column's data type.
        -- NOTE(review): vKeyValue is concatenated into the SQL text rather than bound;
        -- the values come from the table itself, but a value containing a quote would
        -- break (string case) — consider bind variables. TODO confirm acceptable.
        IF vDataType IN ('VARCHAR2', 'CHAR', 'NCHAR', 'NVARCHAR2') THEN
            vQuery := 'SELECT * FROM ' || vTableName ||
                      ' WHERE ' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = ' || CHR(39) || vKeyValue || CHR(39);
        ELSIF vDataType IN ('NUMBER', 'FLOAT', 'BINARY_FLOAT', 'BINARY_DOUBLE') THEN
            vQuery := 'SELECT * FROM ' || vTableName ||
                      ' WHERE ' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = ' || vKeyValue;
        ELSIF vDataType LIKE 'TIMESTAMP%' OR vDataType = 'DATE' THEN
            vQuery := 'SELECT * FROM ' || vTableName ||
                      ' WHERE ' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) ||
                      ' = TO_TIMESTAMP(' || CHR(39) || vKeyValue || CHR(39) ||', ''YYYY-MM-DD HH24:MI:SS.FF'')';
        ELSE
            RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_UNSUPPORTED_DATA_TYPE, ENV_MANAGER.MSG_UNSUPPORTED_DATA_TYPE);
        END IF;

        -- Construct the URI for the file in OCI Object Storage
        -- (one CSV file per distinct key value, file name sanitized).
        vUri := vBucketUri ||
                CASE WHEN pFolderName IS NOT NULL THEN pFolderName || '/' ELSE '' END ||
                sanitizeFilename(vKeyValue) || '.csv';

        -- Use DBMS_CLOUD package to export data to the URI
        DBMS_CLOUD.EXPORT_DATA(
            credential_name => pCredentialName,
            file_uri_list   => vUri,
            query           => vQuery,
            format          => json_object('type' VALUE 'CSV', 'header' VALUE true)
        );
    END LOOP;
    ENV_MANAGER.LOG_PROCESS_EVENT('End','INFO',vParameters);
EXCEPTION
    -- NOTE(review): these named exceptions presumably map to the CODE_* error numbers
    -- via PRAGMA EXCEPTION_INIT in ENV_MANAGER — confirm in that package's spec.
    WHEN ENV_MANAGER.ERR_TABLE_NOT_EXISTS THEN
        vgMsgTmp := ENV_MANAGER.MSG_TABLE_NOT_EXISTS ||': '||vTableName;
        ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_TABLE_NOT_EXISTS, vgMsgTmp);
    WHEN ENV_MANAGER.ERR_COLUMN_NOT_EXISTS THEN
        vgMsgTmp := ENV_MANAGER.MSG_COLUMN_NOT_EXISTS || ' (TableName.ColumnName): ' || vTableName||'.'||vKeyColumnName;
        ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, vgMsgTmp);
    WHEN ENV_MANAGER.ERR_UNSUPPORTED_DATA_TYPE THEN
        vgMsgTmp := ENV_MANAGER.MSG_UNSUPPORTED_DATA_TYPE || ' vDataType: '||vDataType;
        ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_UNSUPPORTED_DATA_TYPE, vgMsgTmp);
    WHEN OTHERS THEN
        -- Log complete error details including full stack trace and backtrace
        ENV_MANAGER.LOG_PROCESS_ERROR('Export failed: ' || SQLERRM, vParameters, 'DATA_EXPORTER');
        ENV_MANAGER.LOG_PROCESS_EVENT(ENV_MANAGER.GET_ERROR_STACK(pFormat => 'TABLE', pCode=> SQLCODE), 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_UNKNOWN, ENV_MANAGER.GET_ERROR_STACK(pFormat => 'OUTPUT', pCode=> SQLCODE));

END EXPORT_TABLE_DATA;
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
----------------------------------------------------------------------------------------------------
-- EXPORT_TABLE_DATA_BY_DATE
-- Exports table data to Parquet files in OCI Object Storage, partitioned Hive-style
-- by PARTITION_YEAR / PARTITION_MONTH derived from CT_ODS.A_LOAD_HISTORY.LOAD_START.
-- One Parquet file is written per distinct (year, month) of the joined load history rows
-- falling in [pMinDate, pMaxDate).
--
-- Parameters:
--   pSchemaName     - owner of the source table
--   pTableName      - source table name
--   pKeyColumnName  - FK column on the source table joined to A_LOAD_HISTORY's key
--   pBucketArea     - logical bucket area, resolved via FILE_MANAGER.GET_BUCKET_URI
--   pFolderName     - optional sub-folder inside the bucket (NULL = bucket root)
--   pColumnList     - optional comma-separated column list; NULL exports T.* (all columns)
--   pMinDate        - inclusive lower bound on LOAD_START (default 1900-01-01)
--   pMaxDate        - exclusive upper bound on LOAD_START (default SYSDATE)
--   pCredentialName - DBMS_CLOUD credential; defaults to ENV_MANAGER.gvCredentialName
--
-- NOTE(review): both dynamic queries join on L.A_WORKFLOW_HISTORY_KEY. Per the
-- accompanying README this body appears to be the v2.1.0 backup copy
-- (current_version/); the v2.1.1 fix replaces these joins with
-- L.A_ETL_LOAD_SET_KEY. Confirm which folder this file belongs to before editing.
----------------------------------------------------------------------------------------------------
PROCEDURE EXPORT_TABLE_DATA_BY_DATE (
    pSchemaName     IN VARCHAR2,
    pTableName      IN VARCHAR2,
    pKeyColumnName  IN VARCHAR2,
    pBucketArea     IN VARCHAR2,
    pFolderName     IN VARCHAR2,
    pColumnList     IN VARCHAR2 default NULL,
    pMinDate        IN DATE default DATE '1900-01-01',
    pMaxDate        IN DATE default SYSDATE,
    pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
)
IS
    -- Type definition for key values
    TYPE key_value_tab IS TABLE OF VARCHAR2(4000);

    vKeyValuesYear  key_value_tab;       -- distinct years from LOAD_START
    vKeyValuesMonth key_value_tab;       -- months paired index-for-index with the years

    vCount          INTEGER;             -- existence-check counter
    vSql            VARCHAR2(32000);     -- dynamic partition-discovery statement
    vKeyValueYear   VARCHAR2(4000);      -- current partition year in the loop
    vKeyValueMonth  VARCHAR2(4000);      -- current partition month in the loop
    vQuery          VARCHAR2(32767);     -- per-partition export query
    vUri            VARCHAR2(4000);      -- target object-storage URI
    vDataType       VARCHAR2(30);        -- declared but not read in this procedure
    vTableName      VARCHAR2(128);       -- upper-cased, later becomes SCHEMA.TABLE
    vSchemaName     VARCHAR2(128);
    vKeyColumnName  VARCHAR2(128);
    vParameters     CT_MRDS.A_PROCESS_LOG.PROCEDURE_PARAMETERS%TYPE;  -- formatted params for logging
    vProcessedColumnList VARCHAR2(32767);  -- pColumnList with T. prefixes, or 'T.*'
    vBucketUri      VARCHAR2(4000);      -- base URI resolved from pBucketArea
    -- NOTE(review): this outer vCurrentCol is shadowed by the re-declaration inside the
    -- validation DECLARE block below, so it is never assigned; the COLUMN_NOT_EXISTS
    -- handler's CASE on it therefore never adds the "in pColumnList" suffix.
    vCurrentCol     VARCHAR2(128);

    -- Function to sanitize file names
    FUNCTION sanitizeFilename(pFilename IN VARCHAR2) RETURN VARCHAR2 IS
        vFilename VARCHAR2(1000);
    BEGIN
        -- Replace any disallowed characters with underscores
        vFilename := REGEXP_REPLACE(pFilename, '[^a-zA-Z0-9._-]', '_');
        RETURN vFilename;
    END sanitizeFilename;

    -- Function to add T. prefix to column names
    -- Returns 'T.*' for a NULL list; otherwise each bare column gets a 'T.' prefix
    -- (columns already qualified with a dot are left as-is).
    FUNCTION addTablePrefix(pColumnList IN VARCHAR2) RETURN VARCHAR2 IS
        vResult     VARCHAR2(32767);
        vColumns    VARCHAR2(32767);
        vPos        PLS_INTEGER;
        vNextPos    PLS_INTEGER;
        vCurrentCol VARCHAR2(128);
    BEGIN
        IF pColumnList IS NULL THEN
            RETURN 'T.*';
        END IF;

        -- Remove extra spaces and convert to uppercase
        vColumns := UPPER(REPLACE(pColumnList, ' ', ''));
        vPos := 1;
        vResult := '';

        -- Parse comma-separated column list and add T. prefix
        WHILE vPos <= LENGTH(vColumns) LOOP
            vNextPos := INSTR(vColumns, ',', vPos);
            IF vNextPos = 0 THEN
                vNextPos := LENGTH(vColumns) + 1;
            END IF;

            vCurrentCol := SUBSTR(vColumns, vPos, vNextPos - vPos);

            -- Add T. prefix if not already present
            IF INSTR(vCurrentCol, '.') = 0 THEN
                vCurrentCol := 'T.' || vCurrentCol;
            END IF;

            -- Add to result with comma separator
            IF vResult IS NOT NULL THEN
                vResult := vResult || ', ';
            END IF;
            vResult := vResult || vCurrentCol;

            vPos := vNextPos + 1;
        END LOOP;

        RETURN vResult;
    END addTablePrefix;

BEGIN
    -- Capture the full parameter list once so every log/error entry carries context.
    vParameters := ENV_MANAGER.FORMAT_PARAMETERS(SYS.ODCIVARCHAR2LIST( 'pSchemaName => '''||nvl(pSchemaName, 'NULL')||''''
                                                                      ,'pTableName => '''||nvl(pTableName, 'NULL')||''''
                                                                      ,'pKeyColumnName => '''||nvl(pKeyColumnName, 'NULL')||''''
                                                                      ,'pBucketArea => '''||nvl(pBucketArea, 'NULL')||''''
                                                                      ,'pFolderName => '''||nvl(pFolderName, 'NULL')||''''
                                                                      ,'pColumnList => '''||nvl(pColumnList, 'NULL')||''''
                                                                      ,'pMinDate => '''||nvl(TO_CHAR(pMinDate, 'YYYY-MM-DD HH24:MI:SS'), 'NULL')||''''
                                                                      ,'pMaxDate => '''||nvl(TO_CHAR(pMaxDate, 'YYYY-MM-DD HH24:MI:SS'), 'NULL')||''''
                                                                      ,'pCredentialName => '''||nvl(pCredentialName, 'NULL')||''''
                                                                      ));
    ENV_MANAGER.LOG_PROCESS_EVENT('Start','INFO', vParameters);

    -- Get bucket URI based on bucket area using FILE_MANAGER function
    vBucketUri := FILE_MANAGER.GET_BUCKET_URI(pBucketArea);

    -- Convert table and column names to uppercase to match data dictionary
    vTableName     := UPPER(pTableName);
    vSchemaName    := UPPER(pSchemaName);
    vKeyColumnName := UPPER(pKeyColumnName);

    -- Check if table exists
    SELECT COUNT(*) INTO vCount
    FROM all_tables
    WHERE table_name = vTableName
      AND owner = vSchemaName;

    IF vCount = 0 THEN
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_TABLE_NOT_EXISTS, ENV_MANAGER.MSG_TABLE_NOT_EXISTS);
    END IF;

    -- Check if key column exists
    SELECT COUNT(*) INTO vCount
    FROM all_tab_columns
    WHERE table_name = vTableName
      AND column_name = vKeyColumnName
      AND owner = vSchemaName;

    IF vCount = 0 THEN
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, ENV_MANAGER.MSG_COLUMN_NOT_EXISTS);
    END IF;

    -- Validate pColumnList - check if all column names exist in the table
    IF pColumnList IS NOT NULL THEN
        DECLARE
            vColumnName VARCHAR2(128);
            vColumns    VARCHAR2(32767);
            vPos        PLS_INTEGER;
            vNextPos    PLS_INTEGER;
            -- NOTE(review): shadows the outer vCurrentCol (see declaration above).
            vCurrentCol VARCHAR2(128);
        BEGIN
            -- Remove spaces and convert to uppercase for processing
            vColumns := UPPER(REPLACE(pColumnList, ' ', ''));
            vPos := 1;

            -- Parse comma-separated column list
            WHILE vPos <= LENGTH(vColumns) LOOP
                vNextPos := INSTR(vColumns, ',', vPos);
                IF vNextPos = 0 THEN
                    vNextPos := LENGTH(vColumns) + 1;
                END IF;

                vCurrentCol := SUBSTR(vColumns, vPos, vNextPos - vPos);

                -- Remove table alias prefix if present (e.g., 'T.COLUMN_NAME' -> 'COLUMN_NAME')
                IF INSTR(vCurrentCol, '.') > 0 THEN
                    vCurrentCol := SUBSTR(vCurrentCol, INSTR(vCurrentCol, '.') + 1);
                END IF;

                -- Check if column exists in the table
                SELECT COUNT(*) INTO vCount
                FROM all_tab_columns
                WHERE table_name = vTableName
                  AND column_name = vCurrentCol
                  AND owner = vSchemaName;

                IF vCount = 0 THEN
                    RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, ENV_MANAGER.MSG_COLUMN_NOT_EXISTS);
                END IF;

                vPos := vNextPos + 1;
            END LOOP;
        END;
    END IF;

    -- Process column list to add T. prefix to each column
    vProcessedColumnList := addTablePrefix(pColumnList);

    -- DBMS_ASSERT guards the identifiers used in the dynamic SQL below against injection.
    vTableName := DBMS_ASSERT.SCHEMA_NAME(vSchemaName) || '.' || DBMS_ASSERT.simple_sql_name(vTableName);
    -- Fetch unique key values
    -- (the distinct year/month pairs of LOAD_START for rows joined through the key column)
    vSql := 'SELECT DISTINCT TO_CHAR(L.LOAD_START,''YYYY'') AS YR, TO_CHAR(L.LOAD_START,''MM'') AS MN
               FROM ' || vTableName || ' T, CT_ODS.A_LOAD_HISTORY L
              WHERE T.' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = L.A_WORKFLOW_HISTORY_KEY
                AND L.LOAD_START >= :pMinDate
                AND L.LOAD_START < :pMaxDate
            ' ;
    EXECUTE IMMEDIATE vSql BULK COLLECT INTO vKeyValuesYear, vKeyValuesMonth USING pMinDate, pMaxDate;

    -- Loop over each unique key value
    FOR i IN 1 .. vKeyValuesYear.COUNT LOOP
        vKeyValueYear  := vKeyValuesYear(i);
        vKeyValueMonth := vKeyValuesMonth(i);
        -- Construct the query to extract data for the current key value
        -- (year/month literals come from TO_CHAR output, so quoting is safe here).

        vQuery := 'SELECT ' || vProcessedColumnList || '
                     FROM ' || vTableName || ' T, CT_ODS.A_LOAD_HISTORY L
                    WHERE T.' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = L.A_WORKFLOW_HISTORY_KEY
                      AND TO_CHAR(L.LOAD_START,''YYYY'') = ' || CHR(39) || vKeyValueYear || CHR(39) || '
                      AND TO_CHAR(L.LOAD_START,''MM'') = ' || CHR(39) || vKeyValueMonth || CHR(39) || '
                      AND L.LOAD_START >= TO_DATE(' || CHR(39) || TO_CHAR(pMinDate, 'YYYY-MM-DD HH24:MI:SS') || CHR(39) || ', ''YYYY-MM-DD HH24:MI:SS'')
                      AND L.LOAD_START < TO_DATE(' || CHR(39) || TO_CHAR(pMaxDate, 'YYYY-MM-DD HH24:MI:SS') || CHR(39) || ', ''YYYY-MM-DD HH24:MI:SS'')';

        -- Construct the URI for the file in OCI Object Storage
        -- (Hive-style partition folders: PARTITION_YEAR=.../PARTITION_MONTH=...)
        vUri := vBucketUri ||
                CASE WHEN pFolderName IS NOT NULL THEN pFolderName || '/' ELSE '' END ||
                'PARTITION_YEAR=' || sanitizeFilename(vKeyValueYear) || '/' ||
                'PARTITION_MONTH=' || sanitizeFilename(vKeyValueMonth) || '/' ||
                sanitizeFilename(vKeyValueYear) || sanitizeFilename(vKeyValueMonth) || '.parquet';

        --DBMS_OUTPUT.PUT_LINE(vQuery);

        -- Use DBMS_CLOUD package to export data to the URI
        DBMS_CLOUD.EXPORT_DATA(
            credential_name => pCredentialName,
            file_uri_list   => vUri,
            query           => vQuery,
            format          => json_object('type' VALUE 'parquet')
        );
    END LOOP;

    ENV_MANAGER.LOG_PROCESS_EVENT('End','INFO',vParameters);
EXCEPTION
    -- NOTE(review): vgMsgTmp is presumably a package-level variable — confirm.
    WHEN ENV_MANAGER.ERR_TABLE_NOT_EXISTS THEN
        vgMsgTmp := ENV_MANAGER.MSG_TABLE_NOT_EXISTS ||': '||vTableName;
        ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_TABLE_NOT_EXISTS, vgMsgTmp);
    WHEN ENV_MANAGER.ERR_COLUMN_NOT_EXISTS THEN
        -- NOTE(review): vCurrentCol here is the outer declaration, which the shadowed
        -- inner block never assigns, so the CASE suffix is effectively dead code.
        vgMsgTmp := ENV_MANAGER.MSG_COLUMN_NOT_EXISTS || ' (TableName.ColumnName): ' || vTableName||'.'||vKeyColumnName||CASE WHEN vCurrentCol IS NOT NULL THEN '.'||vCurrentCol||' in pColumnList' ELSE '' END;
        ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, vgMsgTmp);
    WHEN OTHERS THEN
        -- Log complete error details including full stack trace and backtrace
        ENV_MANAGER.LOG_PROCESS_ERROR('Export failed: ' || SQLERRM, vParameters, 'DATA_EXPORTER');
        ENV_MANAGER.LOG_PROCESS_EVENT(ENV_MANAGER.GET_ERROR_STACK(pFormat => 'TABLE', pCode=> SQLCODE), 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_UNKNOWN, ENV_MANAGER.GET_ERROR_STACK(pFormat => 'OUTPUT', pCode=> SQLCODE));

END EXPORT_TABLE_DATA_BY_DATE;
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
/**
 * @name EXPORT_TABLE_DATA_TO_CSV_BY_DATE
 * @desc Exports data to CSV files with date filtering.
 *       Like EXPORT_TABLE_DATA_BY_DATE, but writes CSV instead of Parquet.
 *       NOTE(review): contrary to the original description ("one CSV file"),
 *       the loop below creates ONE FILE PER DISTINCT YEAR/MONTH of
 *       CT_ODS.A_LOAD_HISTORY.LOAD_START found in the filtered data
 *       (file name pattern: {base}_YYYYMM.csv).
 *       Uses the same date filtering mechanism with CT_ODS.A_LOAD_HISTORY.
 *       Allows specifying custom column list or uses T.* if pColumnList is NULL.
 *       Validates that all columns in pColumnList exist in the target table.
 *       Automatically adds 'T.' prefix to column names in pColumnList.
 * @example
 * begin
 *     DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
 *         pSchemaName    => 'CT_MRDS',
 *         pTableName     => 'MY_TABLE',
 *         pKeyColumnName => 'A_WORKFLOW_HISTORY_KEY',
 *         pBucketArea    => 'DATA',
 *         pFolderName    => 'exports',
 *         pFileName      => 'my_export.csv',
 *         pColumnList    => 'COLUMN1, COLUMN2, COLUMN3', -- Optional
 *         pMinDate       => DATE '2024-01-01',
 *         pMaxDate       => SYSDATE
 *     );
 * end;
 **/
PROCEDURE EXPORT_TABLE_DATA_TO_CSV_BY_DATE (
    pSchemaName     IN VARCHAR2,
    pTableName      IN VARCHAR2,
    pKeyColumnName  IN VARCHAR2,
    pBucketArea     IN VARCHAR2,
    pFolderName     IN VARCHAR2,
    pFileName       IN VARCHAR2 DEFAULT NULL,
    pColumnList     IN VARCHAR2 default NULL,
    pMinDate        IN DATE default DATE '1900-01-01',
    pMaxDate        IN DATE default SYSDATE,
    pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
)
IS
    -- Type definition for key values
    TYPE key_value_tab IS TABLE OF VARCHAR2(4000);

    vKeyValuesYear       key_value_tab;   -- distinct years (format YYYY)
    vKeyValuesMonth      key_value_tab;   -- distinct months (format MM), index-aligned with vKeyValuesYear

    vCount               INTEGER;         -- data-dictionary existence-check result
    vSql                 VARCHAR2(4000);  -- dynamic SQL used to discover year/month combinations
    vKeyValueYear        VARCHAR2(4000);
    vKeyValueMonth       VARCHAR2(4000);
    vQuery               VARCHAR2(32767); -- dynamic SELECT handed to DBMS_CLOUD.EXPORT_DATA
    vUri                 VARCHAR2(4000);  -- target object-storage URI for the current file
    vDataType            VARCHAR2(30);    -- NOTE(review): key column data type is fetched but never used in this procedure
    vTableName           VARCHAR2(128);
    vSchemaName          VARCHAR2(128);
    vKeyColumnName       VARCHAR2(128);
    vParameters          CT_MRDS.A_PROCESS_LOG.PROCEDURE_PARAMETERS%TYPE;
    vFileBaseName        VARCHAR2(4000);
    vFileExtension       VARCHAR2(10);
    vProcessedColumnList VARCHAR2(32767);
    vBucketUri           VARCHAR2(4000);
    vCurrentCol          VARCHAR2(128);   -- NOTE(review): never assigned in this scope (the validation block below declares
                                          -- its own vCurrentCol), so the ERR_COLUMN_NOT_EXISTS handler always sees NULL here

    -- Function to sanitize file names: any character outside [A-Za-z0-9._-] becomes '_'
    FUNCTION sanitizeFilename(pFilename IN VARCHAR2) RETURN VARCHAR2 IS
        vFilename VARCHAR2(1000);
    BEGIN
        -- Replace any disallowed characters with underscores
        vFilename := REGEXP_REPLACE(pFilename, '[^a-zA-Z0-9._-]', '_');
        RETURN vFilename;
    END sanitizeFilename;

    -- Function to add T. prefix to each comma-separated column name
    -- (columns already carrying an alias/prefix are left as-is);
    -- returns 'T.*' when pColumnList is NULL
    FUNCTION addTablePrefix(pColumnList IN VARCHAR2) RETURN VARCHAR2 IS
        vResult     VARCHAR2(32767);
        vColumns    VARCHAR2(32767);
        vPos        PLS_INTEGER;
        vNextPos    PLS_INTEGER;
        vCurrentCol VARCHAR2(128);
    BEGIN
        IF pColumnList IS NULL THEN
            RETURN 'T.*';
        END IF;

        -- Remove extra spaces and convert to uppercase
        vColumns := UPPER(REPLACE(pColumnList, ' ', ''));
        vPos := 1;
        vResult := '';

        -- Parse comma-separated column list and add T. prefix
        WHILE vPos <= LENGTH(vColumns) LOOP
            vNextPos := INSTR(vColumns, ',', vPos);
            IF vNextPos = 0 THEN
                vNextPos := LENGTH(vColumns) + 1;
            END IF;

            vCurrentCol := SUBSTR(vColumns, vPos, vNextPos - vPos);

            -- Add T. prefix if not already present
            IF INSTR(vCurrentCol, '.') = 0 THEN
                vCurrentCol := 'T.' || vCurrentCol;
            END IF;

            -- Add to result with comma separator
            IF vResult IS NOT NULL THEN
                vResult := vResult || ', ';
            END IF;
            vResult := vResult || vCurrentCol;

            vPos := vNextPos + 1;
        END LOOP;

        RETURN vResult;
    END addTablePrefix;

BEGIN
    -- Capture actual parameter values for process logging
    vParameters := ENV_MANAGER.FORMAT_PARAMETERS(SYS.ODCIVARCHAR2LIST( 'pSchemaName => '''||nvl(pSchemaName, 'NULL')||''''
                                                                      ,'pTableName => '''||nvl(pTableName, 'NULL')||''''
                                                                      ,'pKeyColumnName => '''||nvl(pKeyColumnName, 'NULL')||''''
                                                                      ,'pBucketArea => '''||nvl(pBucketArea, 'NULL')||''''
                                                                      ,'pFolderName => '''||nvl(pFolderName, 'NULL')||''''
                                                                      ,'pFileName => '''||nvl(pFileName, 'NULL')||''''
                                                                      ,'pColumnList => '''||nvl(pColumnList, 'NULL')||''''
                                                                      ,'pMinDate => '''||nvl(TO_CHAR(pMinDate, 'YYYY-MM-DD HH24:MI:SS'), 'NULL')||''''
                                                                      ,'pMaxDate => '''||nvl(TO_CHAR(pMaxDate, 'YYYY-MM-DD HH24:MI:SS'), 'NULL')||''''
                                                                      ,'pCredentialName => '''||nvl(pCredentialName, 'NULL')||''''
                                                                      ));
    ENV_MANAGER.LOG_PROCESS_EVENT('Start','INFO', vParameters);

    -- Get bucket URI based on bucket area using FILE_MANAGER function
    vBucketUri := FILE_MANAGER.GET_BUCKET_URI(pBucketArea);

    -- Convert table and column names to uppercase to match data dictionary
    vTableName := UPPER(pTableName);
    vSchemaName := UPPER(pSchemaName);
    vKeyColumnName := UPPER(pKeyColumnName);

    -- Extract base filename and extension or construct default filename
    IF pFileName IS NOT NULL THEN
        -- Use provided filename; split on the LAST '.' so names like a.b.csv work
        IF INSTR(pFileName, '.') > 0 THEN
            vFileBaseName := SUBSTR(pFileName, 1, INSTR(pFileName, '.', -1) - 1);
            vFileExtension := SUBSTR(pFileName, INSTR(pFileName, '.', -1));
        ELSE
            vFileBaseName := pFileName;
            vFileExtension := '.csv';
        END IF;
    ELSE
        -- Construct default filename: TABLENAME.csv (without date range)
        vFileBaseName := UPPER(pTableName);
        vFileExtension := '.csv';
    END IF;

    -- Check if table exists
    SELECT COUNT(*) INTO vCount
    FROM all_tables
    WHERE table_name = vTableName
    AND owner = vSchemaName;

    IF vCount = 0 THEN
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_TABLE_NOT_EXISTS, ENV_MANAGER.MSG_TABLE_NOT_EXISTS);
    END IF;

    -- Check if key column exists
    SELECT COUNT(*) INTO vCount
    FROM all_tab_columns
    WHERE table_name = vTableName
    AND column_name = vKeyColumnName
    AND owner = vSchemaName;

    IF vCount = 0 THEN
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, ENV_MANAGER.MSG_COLUMN_NOT_EXISTS);
    END IF;

    -- Validate pColumnList - check if all column names exist in the table
    IF pColumnList IS NOT NULL THEN
        DECLARE
            vColumnName VARCHAR2(128);
            vColumns VARCHAR2(32767);
            vPos PLS_INTEGER;
            vNextPos PLS_INTEGER;
            vCurrentCol VARCHAR2(128);  -- shadows the outer vCurrentCol (see NOTE(review) above)
        BEGIN
            -- Remove spaces and convert to uppercase for processing
            vColumns := UPPER(REPLACE(pColumnList, ' ', ''));
            vPos := 1;

            -- Parse comma-separated column list
            WHILE vPos <= LENGTH(vColumns) LOOP
                vNextPos := INSTR(vColumns, ',', vPos);
                IF vNextPos = 0 THEN
                    vNextPos := LENGTH(vColumns) + 1;
                END IF;

                vCurrentCol := SUBSTR(vColumns, vPos, vNextPos - vPos);

                -- Remove table alias prefix if present (e.g., 'T.COLUMN_NAME' -> 'COLUMN_NAME')
                IF INSTR(vCurrentCol, '.') > 0 THEN
                    vCurrentCol := SUBSTR(vCurrentCol, INSTR(vCurrentCol, '.') + 1);
                END IF;

                -- Check if column exists in the table
                SELECT COUNT(*) INTO vCount
                FROM all_tab_columns
                WHERE table_name = vTableName
                AND column_name = vCurrentCol
                AND owner = vSchemaName;

                IF vCount = 0 THEN
                    RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, ENV_MANAGER.MSG_COLUMN_NOT_EXISTS);
                END IF;

                vPos := vNextPos + 1;
            END LOOP;
        END;
    END IF;

    -- Process column list to add T. prefix to each column
    vProcessedColumnList := addTablePrefix(pColumnList);

    -- Get the data type of the key column
    SELECT data_type INTO vDataType
    FROM all_tab_columns
    WHERE table_name = vTableName
    AND column_name = vKeyColumnName
    AND owner = vSchemaName;

    -- Validate identifiers (DBMS_ASSERT guards the dynamic SQL below against
    -- injection) and build the fully-qualified table name
    vTableName := DBMS_ASSERT.SCHEMA_NAME(vSchemaName) || '.' || DBMS_ASSERT.simple_sql_name(vTableName);

    -- Fetch unique year/month combinations (date bounds passed as binds)
    vSql := 'SELECT DISTINCT TO_CHAR(L.LOAD_START,''YYYY'') AS YR, TO_CHAR(L.LOAD_START,''MM'') AS MN
            FROM ' || vTableName || ' T, CT_ODS.A_LOAD_HISTORY L
            WHERE T.' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = L.A_WORKFLOW_HISTORY_KEY
            AND L.LOAD_START >= :pMinDate
            AND L.LOAD_START < :pMaxDate
            ' ;
    EXECUTE IMMEDIATE vSql BULK COLLECT INTO vKeyValuesYear, vKeyValuesMonth USING pMinDate, pMaxDate;

    ENV_MANAGER.LOG_PROCESS_EVENT('Found ' || vKeyValuesYear.COUNT || ' year/month combinations to export', 'INFO', vParameters);

    -- Loop over each unique year/month combination (one CSV file per combination)
    FOR i IN 1 .. vKeyValuesYear.COUNT LOOP
        vKeyValueYear := vKeyValuesYear(i);
        vKeyValueMonth := vKeyValuesMonth(i);

        -- Construct the query to extract data for the current year/month;
        -- year/month values come from TO_CHAR above so are safe to inline
        vQuery := 'SELECT ' || vProcessedColumnList || '
            FROM ' || vTableName || ' T, CT_ODS.A_LOAD_HISTORY L
            WHERE T.' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = L.A_WORKFLOW_HISTORY_KEY
            AND TO_CHAR(L.LOAD_START,''YYYY'') = ' || CHR(39) || vKeyValueYear || CHR(39) || '
            AND TO_CHAR(L.LOAD_START,''MM'') = ' || CHR(39) || vKeyValueMonth || CHR(39) || '
            AND L.LOAD_START >= TO_DATE(' || CHR(39) || TO_CHAR(pMinDate, 'YYYY-MM-DD HH24:MI:SS') || CHR(39) || ', ''YYYY-MM-DD HH24:MI:SS'')
            AND L.LOAD_START < TO_DATE(' || CHR(39) || TO_CHAR(pMaxDate, 'YYYY-MM-DD HH24:MI:SS') || CHR(39) || ', ''YYYY-MM-DD HH24:MI:SS'')';

        -- Construct the URI for the CSV file in OCI Object Storage:
        -- {bucket}[{folder}/]{base}_{YYYY}{MM}{ext}
        vUri := vBucketUri ||
                CASE WHEN pFolderName IS NOT NULL THEN pFolderName || '/' ELSE '' END ||
                sanitizeFilename(vFileBaseName) || '_' ||
                sanitizeFilename(vKeyValueYear) || sanitizeFilename(vKeyValueMonth) ||
                vFileExtension;

        ENV_MANAGER.LOG_PROCESS_EVENT('Exporting to CSV file: ' || vUri, 'INFO', vParameters);
        ENV_MANAGER.LOG_PROCESS_EVENT('Year/Month: ' || vKeyValueYear || '/' || vKeyValueMonth, 'DEBUG', vParameters);

        -- Use DBMS_CLOUD package to export data to CSV file (with header row)
        DBMS_CLOUD.EXPORT_DATA(
            credential_name => pCredentialName,
            file_uri_list => vUri,
            query => vQuery,
            format => json_object('type' VALUE 'CSV', 'header' VALUE true)
        );
    END LOOP;

    ENV_MANAGER.LOG_PROCESS_EVENT('Export completed successfully for ' || vKeyValuesYear.COUNT || ' files', 'INFO', vParameters);
    ENV_MANAGER.LOG_PROCESS_EVENT('End','INFO',vParameters);

EXCEPTION
    WHEN ENV_MANAGER.ERR_TABLE_NOT_EXISTS THEN
        vgMsgTmp := ENV_MANAGER.MSG_TABLE_NOT_EXISTS ||': '||vTableName;
        ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_TABLE_NOT_EXISTS, vgMsgTmp);
    WHEN ENV_MANAGER.ERR_COLUMN_NOT_EXISTS THEN
        vgMsgTmp := ENV_MANAGER.MSG_COLUMN_NOT_EXISTS || ' (TableName.ColumnName): ' || vTableName||'.'||vKeyColumnName||CASE WHEN vCurrentCol IS NOT NULL THEN '.'||vCurrentCol||' in pColumnList' ELSE '' END;
        ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, vgMsgTmp);
    WHEN OTHERS THEN
        -- Log complete error details including full stack trace and backtrace
        ENV_MANAGER.LOG_PROCESS_ERROR('Export failed: ' || SQLERRM, vParameters, 'DATA_EXPORTER');
        ENV_MANAGER.LOG_PROCESS_EVENT(ENV_MANAGER.GET_ERROR_STACK(pFormat => 'TABLE', pCode=> SQLCODE), 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_UNKNOWN, ENV_MANAGER.GET_ERROR_STACK(pFormat => 'OUTPUT', pCode=> SQLCODE));

END EXPORT_TABLE_DATA_TO_CSV_BY_DATE;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
-- VERSION MANAGEMENT FUNCTIONS
----------------------------------------------------------------------------------------------------

-- Returns the package version constant declared in the specification (format X.Y.Z).
FUNCTION GET_VERSION RETURN VARCHAR2 IS
BEGIN
    RETURN PACKAGE_VERSION;
END GET_VERSION;

----------------------------------------------------------------------------------------------------

-- Returns build information (version, build date, author) for this package,
-- formatted by ENV_MANAGER.GET_PACKAGE_VERSION_INFO.
FUNCTION GET_BUILD_INFO RETURN VARCHAR2 IS
BEGIN
    RETURN ENV_MANAGER.GET_PACKAGE_VERSION_INFO(
        pPackageName => 'DATA_EXPORTER',
        pVersion => PACKAGE_VERSION,
        pBuildDate => PACKAGE_BUILD_DATE,
        pAuthor => PACKAGE_AUTHOR
    );
END GET_BUILD_INFO;

----------------------------------------------------------------------------------------------------

-- Returns the VERSION_HISTORY constant formatted by ENV_MANAGER.FORMAT_VERSION_HISTORY.
FUNCTION GET_VERSION_HISTORY RETURN VARCHAR2 IS
BEGIN
    RETURN ENV_MANAGER.FORMAT_VERSION_HISTORY(
        pPackageName => 'DATA_EXPORTER',
        pVersionHistory => VERSION_HISTORY
    );
END GET_VERSION_HISTORY;

----------------------------------------------------------------------------------------------------

END;
/
|
||||
@@ -0,0 +1,163 @@
|
||||
create or replace PACKAGE CT_MRDS.DATA_EXPORTER
AUTHID CURRENT_USER
AS
/**
 * Data Export Package: Provides comprehensive data export capabilities to various formats (CSV, Parquet)
 * with support for cloud storage integration via Oracle Cloud Infrastructure (OCI).
 * The structure of comment is used by GET_PACKAGE_DOCUMENTATION function
 * which returns documentation text for confluence page (to Copy-Paste it).
 **/

    -- Package Version Information
    -- FIX: bumped 2.1.0 -> 2.1.1 per the MARS-826-PREHOOK install scripts
    -- ("Version: 2.1.0 -> 2.1.1 (PATCH)"); the constant had not been updated.
    PACKAGE_VERSION CONSTANT VARCHAR2(10) := '2.1.1';
    -- NOTE(review): the install notes say the build date is updated together with the
    -- version -- confirm the intended timestamp for 2.1.1 before release.
    PACKAGE_BUILD_DATE CONSTANT VARCHAR2(19) := '2025-10-22 15:00:00';
    PACKAGE_AUTHOR CONSTANT VARCHAR2(50) := 'MRDS Development Team';

    -- Version History (last 3-5 changes)
    VERSION_HISTORY CONSTANT VARCHAR2(4000) :=
        'v2.1.1 (2025-10-22): Column reference A_WORKFLOW_HISTORY_KEY -> A_ETL_LOAD_SET_KEY' || CHR(10) ||
        'v2.1.0 (2025-10-22): Added version tracking and PARTITION_YEAR/PARTITION_MONTH support' || CHR(10) ||
        'v2.0.0 (2025-10-01): Separated export functionality from FILE_MANAGER package' || CHR(10) ||
        'v1.0.0 (2025-09-15): Initial implementation within FILE_MANAGER package' || CHR(10);

    -- CR+LF line break, used when composing multi-line messages
    cgBL CONSTANT VARCHAR2(2) := CHR(13)||CHR(10);
    -- Scratch buffer used by package-body exception handlers to build error messages
    vgMsgTmp VARCHAR2(32000);

    ---------------------------------------------------------------------------------------------------------------------------
    ---------------------------------------------------------------------------------------------------------------------------

    /**
     * @name EXPORT_TABLE_DATA
     * @desc Wrapper procedure for DBMS_CLOUD.EXPORT_DATA.
     *       Exports data into CSV file on OCI infrastructure.
     *       pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
     * @example
     * begin
     *     DATA_EXPORTER.EXPORT_TABLE_DATA(
     *         pSchemaName    => 'CT_MRDS',
     *         pTableName     => 'MY_TABLE',
     *         pKeyColumnName => 'A_WORKFLOW_HISTORY_KEY',
     *         pBucketArea    => 'DATA',
     *         pFolderName    => 'csv_exports'
     *     );
     * end;
     **/
    PROCEDURE EXPORT_TABLE_DATA (
        pSchemaName     IN VARCHAR2,
        pTableName      IN VARCHAR2,
        pKeyColumnName  IN VARCHAR2,
        pBucketArea     IN VARCHAR2,
        pFolderName     IN VARCHAR2,
        pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
    );


    /**
     * @name EXPORT_TABLE_DATA_BY_DATE
     * @desc Wrapper procedure for DBMS_CLOUD.EXPORT_DATA.
     *       Exports data into PARQUET files on OCI infrastructure.
     *       Each YEAR_MONTH pair goes to separate file (implicit partitioning).
     *       Allows specifying custom column list or uses T.* if pColumnList is NULL.
     *       Validates that all columns in pColumnList exist in the target table.
     *       Automatically adds 'T.' prefix to column names in pColumnList.
     *       pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
     * @example
     * begin
     *     DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
     *         pSchemaName    => 'CT_MRDS',
     *         pTableName     => 'MY_TABLE',
     *         pKeyColumnName => 'A_WORKFLOW_HISTORY_KEY',
     *         pBucketArea    => 'DATA',
     *         pFolderName    => 'parquet_exports',
     *         pColumnList    => 'COLUMN1, COLUMN2, COLUMN3', -- Optional
     *         pMinDate       => DATE '2024-01-01',
     *         pMaxDate       => SYSDATE
     *     );
     * end;
     **/
    PROCEDURE EXPORT_TABLE_DATA_BY_DATE (
        pSchemaName     IN VARCHAR2,
        pTableName      IN VARCHAR2,
        pKeyColumnName  IN VARCHAR2,
        pBucketArea     IN VARCHAR2,
        pFolderName     IN VARCHAR2,
        pColumnList     IN VARCHAR2 default NULL,
        pMinDate        IN DATE default DATE '1900-01-01',
        pMaxDate        IN DATE default SYSDATE,
        pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
    );


    /**
     * @name EXPORT_TABLE_DATA_TO_CSV_BY_DATE
     * @desc Exports data to separate CSV files partitioned by year and month.
     *       Creates one CSV file for each year/month combination found in the data.
     *       Uses the same date filtering mechanism with CT_ODS.A_LOAD_HISTORY as EXPORT_TABLE_DATA_BY_DATE,
     *       but exports to CSV format instead of Parquet.
     *       File naming pattern: {pFileName}_YYYYMM.csv or {TABLENAME}_YYYYMM.csv (if pFileName is NULL)
     *       pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
     * @example
     * begin
     *     -- With custom filename
     *     DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
     *         pSchemaName    => 'CT_MRDS',
     *         pTableName     => 'MY_TABLE',
     *         pKeyColumnName => 'A_WORKFLOW_HISTORY_KEY',
     *         pBucketArea    => 'DATA',
     *         pFolderName    => 'exports',
     *         pFileName      => 'my_export.csv',
     *         pMinDate       => DATE '2024-01-01',
     *         pMaxDate       => SYSDATE
     *     );
     *
     *     -- With auto-generated filename (based on table name only)
     *     DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
     *         pSchemaName    => 'OU_TOP',
     *         pTableName     => 'AGGREGATED_ALLOTMENT',
     *         pKeyColumnName => 'A_WORKFLOW_HISTORY_KEY',
     *         pBucketArea    => 'ARCHIVE',
     *         pFolderName    => 'exports',
     *         pMinDate       => DATE '2025-09-01',
     *         pMaxDate       => DATE '2025-09-17'
     *     );
     *     -- This will create files like: AGGREGATED_ALLOTMENT_202509.csv, etc.
     * end;
     **/
    PROCEDURE EXPORT_TABLE_DATA_TO_CSV_BY_DATE (
        pSchemaName     IN VARCHAR2,
        pTableName      IN VARCHAR2,
        pKeyColumnName  IN VARCHAR2,
        pBucketArea     IN VARCHAR2,
        pFolderName     IN VARCHAR2,
        pFileName       IN VARCHAR2 DEFAULT NULL,
        pColumnList     IN VARCHAR2 default NULL,
        pMinDate        IN DATE default DATE '1900-01-01',
        pMaxDate        IN DATE default SYSDATE,
        pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
    );

    ---------------------------------------------------------------------------------------------------------------------------
    -- VERSION MANAGEMENT FUNCTIONS
    ---------------------------------------------------------------------------------------------------------------------------

    /**
     * Returns the current package version number
     * return: Version string in format X.Y.Z (e.g., '2.1.1')
     **/
    FUNCTION GET_VERSION RETURN VARCHAR2;

    /**
     * Returns comprehensive build information including version, date, and author
     * return: Formatted string with complete build details
     **/
    FUNCTION GET_BUILD_INFO RETURN VARCHAR2;

    /**
     * Returns the version history with recent changes
     * return: Multi-line string with version history
     **/
    FUNCTION GET_VERSION_HISTORY RETURN VARCHAR2;

END;
/
|
||||
@@ -0,0 +1,89 @@
|
||||
-- ============================================================================
-- MARS-826-PREHOOK Master Installation Script
-- ============================================================================
-- Purpose: Deploy updated DATA_EXPORTER package with A_ETL_LOAD_SET_KEY support
-- Target Schema: CT_MRDS
-- Estimated Time: 1-2 minutes
-- Prerequisites: Database connection to CT_MRDS schema
-- ============================================================================

SET SERVEROUTPUT ON SIZE UNLIMITED
SET VERIFY OFF
SET FEEDBACK ON
SET ECHO OFF

-- Create log directory if it doesn't exist
-- NOTE(review): "2>nul" is Windows cmd redirection; when running from a Unix
-- shell use "2>/dev/null" instead -- confirm the target client platform.
host mkdir log 2>nul

-- Generate dynamic SPOOL filename with timestamp (includes PDB name for traceability)
var filename VARCHAR2(100)
BEGIN
    :filename := 'log/INSTALL_MARS_826_PREHOOK_' || SYS_CONTEXT('USERENV', 'CON_NAME') || '_' || TO_CHAR(SYSDATE,'YYYYMMDD_HH24MISS') || '.log';
END;
/
column filename new_value _filename
select :filename filename from dual;
spool &_filename

PROMPT
PROMPT ============================================================================
PROMPT MARS-826-PREHOOK Installation Starting
PROMPT ============================================================================
PROMPT Package: CT_MRDS.DATA_EXPORTER
PROMPT Change: Column reference A_WORKFLOW_HISTORY_KEY -> A_ETL_LOAD_SET_KEY
PROMPT Purpose: Support for renamed column in CT_ODS.A_LOAD_HISTORY
PROMPT Timestamp:
SELECT TO_CHAR(SYSDATE, 'YYYY-MM-DD HH24:MI:SS') AS install_start FROM DUAL;
PROMPT ============================================================================

-- Confirm installation with user; any answer other than YES aborts the run
ACCEPT continue CHAR PROMPT 'Type YES to continue with installation, or Ctrl+C to abort: '
WHENEVER SQLERROR EXIT SQL.SQLCODE
BEGIN
    IF '&continue' IS NULL OR TRIM('&continue') IS NULL OR UPPER(TRIM('&continue')) != 'YES' THEN
        RAISE_APPLICATION_ERROR(-20001, 'Installation aborted by user');
    END IF;
END;
/
WHENEVER SQLERROR CONTINUE

-- Installation steps
-- FIX: step counter corrected from "Step 1/5" to "Step 1/4" -- there are
-- exactly four steps, and the remaining prompts already say 2/4, 3/4, 4/4.
PROMPT
PROMPT Step 1/4: Deploying DATA_EXPORTER Package Specification
PROMPT ========================================================
@@00_MARS_826_PREHOOK_install_DATA_EXPORTER_SPEC.sql

PROMPT
PROMPT Step 2/4: Deploying DATA_EXPORTER Package Body
PROMPT ===============================================
@@01_MARS_826_PREHOOK_install_DATA_EXPORTER_BODY.sql

PROMPT
PROMPT Step 3/4: Tracking Package Version
PROMPT =====================================
@@track_package_versions.sql

PROMPT
PROMPT Step 4/4: Verifying Package Status
PROMPT ======================================
@@verify_packages_version.sql

PROMPT
PROMPT ============================================================================
PROMPT MARS-826-PREHOOK Installation Completed
PROMPT ============================================================================
PROMPT Completion Time:
SELECT TO_CHAR(SYSDATE, 'YYYY-MM-DD HH24:MI:SS') AS install_end FROM DUAL;
PROMPT
PROMPT Installation Summary:
PROMPT - Package: CT_MRDS.DATA_EXPORTER
PROMPT - Version: 2.1.0 -> 2.1.1 (PATCH)
PROMPT - Change: A_WORKFLOW_HISTORY_KEY -> A_ETL_LOAD_SET_KEY
PROMPT
PROMPT
PROMPT Log file: &_filename
PROMPT ============================================================================

spool off

quit;
|
||||
@@ -0,0 +1,733 @@
|
||||
create or replace PACKAGE BODY CT_MRDS.DATA_EXPORTER
|
||||
AS
|
||||
|
||||
-- Internal shared function to process column list with T. prefix and key column mapping.
-- When pColumnList is NULL, every column of the table is selected (dictionary order)
-- and the key column is aliased back to A_WORKFLOW_HISTORY_KEY for downstream
-- consumers that still expect the legacy name.
-- Parameters:
--   pColumnList    - comma-separated column names, or NULL for "all columns"
--   pTableName     - table name (uppercase, matching all_tab_columns.table_name)
--   pSchemaName    - owner name (uppercase, matching all_tab_columns.owner)
--   pKeyColumnName - key column to alias as A_WORKFLOW_HISTORY_KEY
-- Returns: the SELECT-list string with 'T.' prefixes applied.
FUNCTION processColumnList(pColumnList IN VARCHAR2, pTableName IN VARCHAR2, pSchemaName IN VARCHAR2, pKeyColumnName IN VARCHAR2) RETURN VARCHAR2 IS
    vResult     VARCHAR2(32767);  -- accumulated SELECT list
    vColumns    VARCHAR2(32767);  -- normalized (no spaces, uppercase) copy of pColumnList
    vPos        PLS_INTEGER;      -- current parse position
    vNextPos    PLS_INTEGER;      -- position of the next comma (or end-of-string + 1)
    vCurrentCol VARCHAR2(128);    -- column token currently being processed
BEGIN
    IF pColumnList IS NULL THEN
        -- Build the full column list from the dictionary, aliasing the key column
        -- in the same pass.
        -- FIX: the previous implementation injected the alias with a textual
        -- REPLACE over the joined list, which also corrupted any other column
        -- whose name starts with the key column name (e.g. A_ETL_LOAD_SET_KEY
        -- vs A_ETL_LOAD_SET_KEY_FK would become
        -- "... AS A_WORKFLOW_HISTORY_KEY_FK"). Aliasing per row avoids that.
        SELECT LISTAGG(
                   CASE WHEN column_name = pKeyColumnName
                        THEN 'T.' || column_name || ' AS A_WORKFLOW_HISTORY_KEY'
                        ELSE 'T.' || column_name
                   END, ', ') WITHIN GROUP (ORDER BY column_id)
        INTO vResult
        FROM all_tab_columns
        WHERE table_name = pTableName
        AND owner = pSchemaName;

        RETURN vResult;
    END IF;

    -- Remove extra spaces and convert to uppercase
    vColumns := UPPER(REPLACE(pColumnList, ' ', ''));
    vPos := 1;
    vResult := '';

    -- Parse comma-separated column list and add T. prefix
    WHILE vPos <= LENGTH(vColumns) LOOP
        vNextPos := INSTR(vColumns, ',', vPos);
        IF vNextPos = 0 THEN
            vNextPos := LENGTH(vColumns) + 1;
        END IF;

        vCurrentCol := SUBSTR(vColumns, vPos, vNextPos - vPos);

        -- Check if this is the key column (e.g., A_ETL_LOAD_SET_KEY_FK) and add alias
        IF UPPER(vCurrentCol) = UPPER(pKeyColumnName) THEN
            vCurrentCol := 'T.' || pKeyColumnName || ' AS A_WORKFLOW_HISTORY_KEY';
        ELSIF UPPER(vCurrentCol) = 'A_ETL_LOAD_SET_KEY' THEN
            vCurrentCol := 'T.A_ETL_LOAD_SET_KEY AS A_WORKFLOW_HISTORY_KEY';
        ELSE
            -- Add T. prefix if not already present
            IF INSTR(vCurrentCol, '.') = 0 THEN
                vCurrentCol := 'T.' || vCurrentCol;
            END IF;
        END IF;

        -- Add to result with comma separator
        IF vResult IS NOT NULL THEN
            vResult := vResult || ', ';
        END IF;
        vResult := vResult || vCurrentCol;

        vPos := vNextPos + 1;
    END LOOP;

    RETURN vResult;
END processColumnList;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
PROCEDURE EXPORT_TABLE_DATA (
|
||||
pSchemaName IN VARCHAR2,
|
||||
pTableName IN VARCHAR2,
|
||||
pKeyColumnName IN VARCHAR2,
|
||||
pBucketArea IN VARCHAR2,
|
||||
pFolderName IN VARCHAR2,
|
||||
pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
|
||||
)
|
||||
IS
|
||||
-- Type definition for key values
|
||||
TYPE key_value_tab IS TABLE OF VARCHAR2(4000);
|
||||
vKeyValues key_value_tab;
|
||||
vCount INTEGER;
|
||||
vSql VARCHAR2(4000);
|
||||
vKeyValue VARCHAR2(4000);
|
||||
vQuery VARCHAR2(32767);
|
||||
vUri VARCHAR2(4000);
|
||||
vDataType VARCHAR2(30);
|
||||
vTableName VARCHAR2(128);
|
||||
vSchemaName VARCHAR2(128);
|
||||
vKeyColumnName VARCHAR2(128);
|
||||
vParameters VARCHAR2(4000);
|
||||
vBucketUri VARCHAR2(4000);
|
||||
vProcessedColumnList VARCHAR2(32767);
|
||||
vCurrentCol VARCHAR2(128);
|
||||
vAllColumnsList VARCHAR2(32767);
|
||||
|
||||
|
||||
-- Function to sanitize file names
|
||||
FUNCTION sanitizeFilename(pFilename IN VARCHAR2) RETURN VARCHAR2 IS
|
||||
vFilename VARCHAR2(1000);
|
||||
BEGIN
|
||||
-- Replace any disallowed characters with underscores
|
||||
vFilename := REGEXP_REPLACE(pFilename, '[^a-zA-Z0-9._-]', '_');
|
||||
RETURN vFilename;
|
||||
END sanitizeFilename;
|
||||
|
||||
BEGIN
|
||||
vParameters := ENV_MANAGER.FORMAT_PARAMETERS(SYS.ODCIVARCHAR2LIST( 'pSchemaName => '''||nvl(pSchemaName, 'NULL')||''''
|
||||
,'pTableName => '''||nvl(pTableName, 'NULL')||''''
|
||||
,'pKeyColumnName => '''||nvl(pKeyColumnName, 'NULL')||''''
|
||||
,'pBucketArea => '''||nvl(pBucketArea, 'NULL')||''''
|
||||
,'pFolderName => '''||nvl(pFolderName, 'NULL')||''''
|
||||
,'pCredentialName => '''||nvl(pCredentialName, 'NULL')||''''
|
||||
));
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT('Start','INFO', vParameters);
|
||||
|
||||
-- Get bucket URI based on bucket area using FILE_MANAGER function
|
||||
vBucketUri := FILE_MANAGER.GET_BUCKET_URI(pBucketArea);
|
||||
|
||||
-- Convert table and column names to uppercase to match data dictionary
|
||||
vTableName := UPPER(pTableName);
|
||||
vSchemaName := UPPER(pSchemaName);
|
||||
vKeyColumnName := UPPER(pKeyColumnName);
|
||||
|
||||
-- Check if table exists
|
||||
SELECT COUNT(*) INTO vCount
|
||||
FROM all_tables
|
||||
WHERE table_name = vTableName
|
||||
AND owner = vSchemaName;
|
||||
|
||||
IF vCount = 0 THEN
|
||||
RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_TABLE_NOT_EXISTS, ENV_MANAGER.MSG_TABLE_NOT_EXISTS);
|
||||
END IF;
|
||||
|
||||
-- Check if key column exists
|
||||
SELECT COUNT(*) INTO vCount
|
||||
FROM all_tab_columns
|
||||
WHERE table_name = vTableName
|
||||
AND column_name = vKeyColumnName
|
||||
AND owner = vSchemaName;
|
||||
|
||||
IF vCount = 0 THEN
|
||||
RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, ENV_MANAGER.MSG_COLUMN_NOT_EXISTS);
|
||||
|
||||
END IF;
|
||||
|
||||
-- Get the data type of the key column
|
||||
SELECT data_type INTO vDataType
|
||||
FROM all_tab_columns
|
||||
WHERE table_name = vTableName
|
||||
AND column_name = vKeyColumnName
|
||||
AND owner = vSchemaName;
|
||||
|
||||
-- Build list of all columns for the table (excluding key column to avoid duplication)
|
||||
SELECT LISTAGG(column_name, ', ') WITHIN GROUP (ORDER BY column_id)
|
||||
INTO vAllColumnsList
|
||||
FROM all_tab_columns
|
||||
WHERE table_name = vTableName
|
||||
AND owner = vSchemaName
|
||||
AND column_name != vKeyColumnName;
|
||||
|
||||
-- Process column list to add T. prefix to each column
|
||||
vProcessedColumnList := processColumnList(vAllColumnsList, vTableName, vSchemaName, vKeyColumnName);
|
||||
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT('Dynamic column list built (excluding key): ' || vAllColumnsList, 'DEBUG', vParameters);
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT('Processed column list with T. prefix: ' || vProcessedColumnList, 'DEBUG', vParameters);
|
||||
|
||||
vTableName := DBMS_ASSERT.SCHEMA_NAME(vSchemaName) || '.' || DBMS_ASSERT.simple_sql_name(vTableName);
|
||||
-- Fetch unique key values from A_LOAD_HISTORY
|
||||
vSql := 'SELECT DISTINCT L.A_ETL_LOAD_SET_KEY' ||
|
||||
' FROM ' || vTableName || ' T, CT_ODS.A_LOAD_HISTORY L' ||
|
||||
' WHERE T.' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = L.A_ETL_LOAD_SET_KEY';
|
||||
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT('Executing key values query: ' || vSql, 'DEBUG', vParameters);
|
||||
EXECUTE IMMEDIATE vSql BULK COLLECT INTO vKeyValues;
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT('Found ' || vKeyValues.COUNT || ' unique key values to process', 'DEBUG', vParameters);
|
||||
|
||||
-- Loop over each unique key value
|
||||
FOR i IN 1 .. vKeyValues.COUNT LOOP
|
||||
vKeyValue := vKeyValues(i);
|
||||
|
||||
-- Construct the query to extract data for the current key value with A_WORKFLOW_HISTORY_KEY mapping
|
||||
IF vDataType IN ('VARCHAR2', 'CHAR', 'NCHAR', 'NVARCHAR2') THEN
|
||||
vQuery := 'SELECT ' || vProcessedColumnList ||
|
||||
' FROM ' || vTableName || ' T, CT_ODS.A_LOAD_HISTORY L' ||
|
||||
' WHERE T.' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = L.A_ETL_LOAD_SET_KEY' ||
|
||||
' AND L.A_ETL_LOAD_SET_KEY = ' || CHR(39) || vKeyValue || CHR(39);
|
||||
ELSIF vDataType IN ('NUMBER', 'FLOAT', 'BINARY_FLOAT', 'BINARY_DOUBLE') THEN
|
||||
vQuery := 'SELECT ' || vProcessedColumnList ||
|
||||
' FROM ' || vTableName || ' T, CT_ODS.A_LOAD_HISTORY L' ||
|
||||
' WHERE T.' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = L.A_ETL_LOAD_SET_KEY' ||
|
||||
' AND L.A_ETL_LOAD_SET_KEY = ' || vKeyValue;
|
||||
ELSIF vDataType LIKE 'TIMESTAMP%' OR vDataType = 'DATE' THEN
|
||||
vQuery := 'SELECT ' || vProcessedColumnList ||
|
||||
' FROM ' || vTableName || ' T, CT_ODS.A_LOAD_HISTORY L' ||
|
||||
' WHERE T.' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = L.A_ETL_LOAD_SET_KEY' ||
|
||||
' AND L.A_ETL_LOAD_SET_KEY = TO_TIMESTAMP(' || CHR(39) || vKeyValue || CHR(39) ||', ''YYYY-MM-DD HH24:MI:SS.FF'')';
|
||||
ELSE
|
||||
RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_UNSUPPORTED_DATA_TYPE, ENV_MANAGER.MSG_UNSUPPORTED_DATA_TYPE);
|
||||
END IF;
|
||||
|
||||
-- Construct the URI for the file in OCI Object Storage
|
||||
vUri := vBucketUri ||
|
||||
CASE WHEN pFolderName IS NOT NULL THEN pFolderName || '/' ELSE '' END ||
|
||||
sanitizeFilename(vKeyValue) || '.csv';
|
||||
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT('Processing key value: ' || vKeyValue || ' (' || (i) || '/' || vKeyValues.COUNT || ')', 'DEBUG', vParameters);
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT('Export query: ' || vQuery, 'DEBUG', vParameters);
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT('Export URI: ' || vUri, 'DEBUG', vParameters);
|
||||
|
||||
-- Use DBMS_CLOUD package to export data to the URI
|
||||
DBMS_CLOUD.EXPORT_DATA(
|
||||
credential_name => pCredentialName,
|
||||
file_uri_list => vUri,
|
||||
query => vQuery,
|
||||
format => json_object('type' VALUE 'CSV', 'header' VALUE true)
|
||||
);
|
||||
END LOOP;
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT('End','INFO',vParameters);
|
||||
EXCEPTION
|
||||
WHEN ENV_MANAGER.ERR_TABLE_NOT_EXISTS THEN
|
||||
vgMsgTmp := ENV_MANAGER.MSG_TABLE_NOT_EXISTS ||': '||vTableName;
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
|
||||
RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_TABLE_NOT_EXISTS, vgMsgTmp);
|
||||
WHEN ENV_MANAGER.ERR_COLUMN_NOT_EXISTS THEN
|
||||
vgMsgTmp := ENV_MANAGER.MSG_COLUMN_NOT_EXISTS || ' (TableName.ColumnName): ' || vTableName||'.'||vKeyColumnName||CASE WHEN vCurrentCol IS NOT NULL THEN '.'||vCurrentCol||' in column list' ELSE '' END;
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
|
||||
RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, vgMsgTmp);
|
||||
WHEN ENV_MANAGER.ERR_UNSUPPORTED_DATA_TYPE THEN
|
||||
vgMsgTmp := ENV_MANAGER.MSG_UNSUPPORTED_DATA_TYPE || ' vDataType: '||vDataType;
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
|
||||
RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_UNSUPPORTED_DATA_TYPE, vgMsgTmp);
|
||||
WHEN OTHERS THEN
|
||||
-- Log complete error details including full stack trace and backtrace
|
||||
ENV_MANAGER.LOG_PROCESS_ERROR('Export failed: ' || SQLERRM, vParameters, 'DATA_EXPORTER');
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT(ENV_MANAGER.GET_ERROR_STACK(pFormat => 'TABLE', pCode=> SQLCODE), 'ERROR', vParameters);
|
||||
RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_UNKNOWN, ENV_MANAGER.GET_ERROR_STACK(pFormat => 'OUTPUT', pCode=> SQLCODE));
|
||||
|
||||
END EXPORT_TABLE_DATA;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
----------------------------------------------------------------------------------------------------
-- EXPORT_TABLE_DATA_BY_DATE
-- Exports one Parquet file per distinct LOAD_START year/month combination found in
-- CT_ODS.A_LOAD_HISTORY for rows joined to pSchemaName.pTableName via pKeyColumnName.
-- Files are written under Hive-style PARTITION_YEAR=YYYY/PARTITION_MONTH=MM/ folders.
--
-- In:     pSchemaName      owner of the source table (validated against ALL_TABLES)
--         pTableName       source table name (validated against ALL_TABLES)
--         pKeyColumnName   FK column joined to L.A_ETL_LOAD_SET_KEY (validated)
--         pBucketArea      bucket area key resolved by FILE_MANAGER.GET_BUCKET_URI
--         pFolderName      optional folder prefix inside the bucket (NULL = bucket root)
--         pColumnList      optional comma-separated column list; NULL lets
--                          processColumnList build the list from table metadata
--         pMinDate/pMaxDate  half-open date window [pMinDate, pMaxDate) on L.LOAD_START
--         pCredentialName  OCI credential used by DBMS_CLOUD.EXPORT_DATA
-- Raises: CODE_TABLE_NOT_EXISTS, CODE_COLUMN_NOT_EXISTS, CODE_UNKNOWN (via handlers)
--
-- FIX (this revision): the pColumnList validation block previously declared a local
-- vCurrentCol that shadowed the procedure-level variable, so the
-- ERR_COLUMN_NOT_EXISTS handler could never report which column failed.
-- The shadowing declaration is removed; unused locals vDataType and vColumnName
-- are removed as well.
----------------------------------------------------------------------------------------------------
PROCEDURE EXPORT_TABLE_DATA_BY_DATE (
    pSchemaName     IN VARCHAR2,
    pTableName      IN VARCHAR2,
    pKeyColumnName  IN VARCHAR2,
    pBucketArea     IN VARCHAR2,
    pFolderName     IN VARCHAR2,
    pColumnList     IN VARCHAR2 default NULL,
    pMinDate        IN DATE default DATE '1900-01-01',
    pMaxDate        IN DATE default SYSDATE,
    pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
)
IS
    -- Type definition for collections of key values
    TYPE key_value_tab IS TABLE OF VARCHAR2(4000);

    vKeyValuesYear       key_value_tab;   -- distinct years found ('YYYY' strings)
    vKeyValuesMonth      key_value_tab;   -- months paired with vKeyValuesYear ('MM' strings)

    vCount               INTEGER;         -- data-dictionary existence counter
    vSql                 VARCHAR2(32000); -- dynamic year/month discovery query
    vKeyValueYear        VARCHAR2(4000);
    vKeyValueMonth       VARCHAR2(4000);
    vQuery               VARCHAR2(32767); -- dynamic export query passed to DBMS_CLOUD
    vUri                 VARCHAR2(4000);  -- target object-storage URI
    vTableName           VARCHAR2(128);
    vSchemaName          VARCHAR2(128);
    vKeyColumnName       VARCHAR2(128);
    vParameters          CT_MRDS.A_PROCESS_LOG.PROCEDURE_PARAMETERS%TYPE;
    vProcessedColumnList VARCHAR2(32767);
    vBucketUri           VARCHAR2(4000);
    vCurrentCol          VARCHAR2(128);   -- last column checked from pColumnList;
                                          -- read by the ERR_COLUMN_NOT_EXISTS handler

    -- Function to sanitize file names: replaces any character outside
    -- [a-zA-Z0-9._-] with '_' so the value is safe inside an object name.
    FUNCTION sanitizeFilename(pFilename IN VARCHAR2) RETURN VARCHAR2 IS
        vFilename VARCHAR2(1000);
    BEGIN
        vFilename := REGEXP_REPLACE(pFilename, '[^a-zA-Z0-9._-]', '_');
        RETURN vFilename;
    END sanitizeFilename;

BEGIN
    vParameters := ENV_MANAGER.FORMAT_PARAMETERS(SYS.ODCIVARCHAR2LIST( 'pSchemaName => '''||nvl(pSchemaName, 'NULL')||''''
                                                                      ,'pTableName => '''||nvl(pTableName, 'NULL')||''''
                                                                      ,'pKeyColumnName => '''||nvl(pKeyColumnName, 'NULL')||''''
                                                                      ,'pBucketArea => '''||nvl(pBucketArea, 'NULL')||''''
                                                                      ,'pFolderName => '''||nvl(pFolderName, 'NULL')||''''
                                                                      ,'pColumnList => '''||nvl(pColumnList, 'NULL')||''''
                                                                      ,'pMinDate => '''||nvl(TO_CHAR(pMinDate, 'YYYY-MM-DD HH24:MI:SS'), 'NULL')||''''
                                                                      ,'pMaxDate => '''||nvl(TO_CHAR(pMaxDate, 'YYYY-MM-DD HH24:MI:SS'), 'NULL')||''''
                                                                      ,'pCredentialName => '''||nvl(pCredentialName, 'NULL')||''''
                                                                      ));
    ENV_MANAGER.LOG_PROCESS_EVENT('Start','INFO', vParameters);

    -- Get bucket URI based on bucket area using FILE_MANAGER function
    vBucketUri := FILE_MANAGER.GET_BUCKET_URI(pBucketArea);

    -- Convert table and column names to uppercase to match data dictionary
    vTableName     := UPPER(pTableName);
    vSchemaName    := UPPER(pSchemaName);
    vKeyColumnName := UPPER(pKeyColumnName);

    -- Check if table exists
    SELECT COUNT(*) INTO vCount
    FROM all_tables
    WHERE table_name = vTableName
    AND owner = vSchemaName;

    IF vCount = 0 THEN
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_TABLE_NOT_EXISTS, ENV_MANAGER.MSG_TABLE_NOT_EXISTS);
    END IF;

    -- Check if key column exists
    SELECT COUNT(*) INTO vCount
    FROM all_tab_columns
    WHERE table_name = vTableName
    AND column_name = vKeyColumnName
    AND owner = vSchemaName;

    IF vCount = 0 THEN
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, ENV_MANAGER.MSG_COLUMN_NOT_EXISTS);
    END IF;

    -- Validate pColumnList - check if all column names exist in the table.
    -- vCurrentCol is deliberately the procedure-level variable (no local
    -- shadow), so the exception handler can name the offending column.
    IF pColumnList IS NOT NULL THEN
        DECLARE
            vColumns VARCHAR2(32767);
            vPos     PLS_INTEGER;
            vNextPos PLS_INTEGER;
        BEGIN
            -- Remove spaces and convert to uppercase for processing
            vColumns := UPPER(REPLACE(pColumnList, ' ', ''));
            vPos := 1;

            -- Parse comma-separated column list
            WHILE vPos <= LENGTH(vColumns) LOOP
                vNextPos := INSTR(vColumns, ',', vPos);
                IF vNextPos = 0 THEN
                    vNextPos := LENGTH(vColumns) + 1;
                END IF;

                vCurrentCol := SUBSTR(vColumns, vPos, vNextPos - vPos);

                -- Remove table alias prefix if present ('T.COLUMN_NAME' -> 'COLUMN_NAME')
                IF INSTR(vCurrentCol, '.') > 0 THEN
                    vCurrentCol := SUBSTR(vCurrentCol, INSTR(vCurrentCol, '.') + 1);
                END IF;

                -- Check if column exists in the table
                SELECT COUNT(*) INTO vCount
                FROM all_tab_columns
                WHERE table_name = vTableName
                AND column_name = vCurrentCol
                AND owner = vSchemaName;

                IF vCount = 0 THEN
                    RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, ENV_MANAGER.MSG_COLUMN_NOT_EXISTS);
                END IF;

                vPos := vNextPos + 1;
            END LOOP;
        END;
    END IF;

    -- Process column list to add T. prefix to each column
    vProcessedColumnList := processColumnList(pColumnList, vTableName, vSchemaName, vKeyColumnName);

    ENV_MANAGER.LOG_PROCESS_EVENT('Input column list: ' || NVL(pColumnList, 'NULL (building dynamic list from table metadata)'), 'DEBUG', vParameters);
    ENV_MANAGER.LOG_PROCESS_EVENT('Processed column list: ' || vProcessedColumnList, 'DEBUG', vParameters);

    -- DBMS_ASSERT guards the identifiers that get concatenated into dynamic SQL
    vTableName := DBMS_ASSERT.SCHEMA_NAME(vSchemaName) || '.' || DBMS_ASSERT.simple_sql_name(vTableName);

    -- Fetch distinct year/month combinations inside the half-open window
    vSql := 'SELECT DISTINCT TO_CHAR(L.LOAD_START,''YYYY'') AS YR, TO_CHAR(L.LOAD_START,''MM'') AS MN
             FROM ' || vTableName || ' T, CT_ODS.A_LOAD_HISTORY L
             WHERE T.' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = L.A_ETL_LOAD_SET_KEY
             AND L.LOAD_START >= :pMinDate
             AND L.LOAD_START < :pMaxDate
             ' ;
    ENV_MANAGER.LOG_PROCESS_EVENT('Executing date range query: ' || vSql, 'DEBUG', vParameters);
    EXECUTE IMMEDIATE vSql BULK COLLECT INTO vKeyValuesYear, vKeyValuesMonth USING pMinDate, pMaxDate;
    ENV_MANAGER.LOG_PROCESS_EVENT('Found ' || vKeyValuesYear.COUNT || ' year/month combinations to export', 'DEBUG', vParameters);

    -- Loop over each unique year/month combination
    FOR i IN 1 .. vKeyValuesYear.COUNT LOOP
        vKeyValueYear  := vKeyValuesYear(i);
        vKeyValueMonth := vKeyValuesMonth(i);

        ENV_MANAGER.LOG_PROCESS_EVENT('Processing Year/Month: ' || vKeyValueYear || '/' || vKeyValueMonth || ' (' || i || '/' || vKeyValuesYear.COUNT || ')', 'DEBUG', vParameters);

        -- Construct the query to extract data for the current year/month.
        -- Note: processColumnList already handles A_WORKFLOW_HISTORY_KEY aliasing.
        vQuery := 'SELECT ' || vProcessedColumnList || '
                   FROM ' || vTableName || ' T, CT_ODS.A_LOAD_HISTORY L
                   WHERE T.' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = L.A_ETL_LOAD_SET_KEY
                   AND TO_CHAR(L.LOAD_START,''YYYY'') = ' || CHR(39) || vKeyValueYear || CHR(39) || '
                   AND TO_CHAR(L.LOAD_START,''MM'') = ' || CHR(39) || vKeyValueMonth || CHR(39) || '
                   AND L.LOAD_START >= TO_DATE(' || CHR(39) || TO_CHAR(pMinDate, 'YYYY-MM-DD HH24:MI:SS') || CHR(39) || ', ''YYYY-MM-DD HH24:MI:SS'')
                   AND L.LOAD_START < TO_DATE(' || CHR(39) || TO_CHAR(pMaxDate, 'YYYY-MM-DD HH24:MI:SS') || CHR(39) || ', ''YYYY-MM-DD HH24:MI:SS'')';

        -- Hive-style partition layout: PARTITION_YEAR=YYYY/PARTITION_MONTH=MM/YYYYMM.parquet
        vUri := vBucketUri ||
                CASE WHEN pFolderName IS NOT NULL THEN pFolderName || '/' ELSE '' END ||
                'PARTITION_YEAR=' || sanitizeFilename(vKeyValueYear) || '/' ||
                'PARTITION_MONTH=' || sanitizeFilename(vKeyValueMonth) || '/' ||
                sanitizeFilename(vKeyValueYear) || sanitizeFilename(vKeyValueMonth) || '.parquet';

        ENV_MANAGER.LOG_PROCESS_EVENT('Export query: ' || vQuery, 'DEBUG', vParameters);
        ENV_MANAGER.LOG_PROCESS_EVENT('Parquet export URI: ' || vUri, 'DEBUG', vParameters);

        -- Use DBMS_CLOUD package to export data to the URI
        DBMS_CLOUD.EXPORT_DATA(
            credential_name => pCredentialName,
            file_uri_list   => vUri,
            query           => vQuery,
            format          => json_object('type' VALUE 'parquet')
        );
    END LOOP;

    ENV_MANAGER.LOG_PROCESS_EVENT('End','INFO',vParameters);
EXCEPTION
    WHEN ENV_MANAGER.ERR_TABLE_NOT_EXISTS THEN
        vgMsgTmp := ENV_MANAGER.MSG_TABLE_NOT_EXISTS ||': '||vTableName;
        ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_TABLE_NOT_EXISTS, vgMsgTmp);
    WHEN ENV_MANAGER.ERR_COLUMN_NOT_EXISTS THEN
        vgMsgTmp := ENV_MANAGER.MSG_COLUMN_NOT_EXISTS || ' (TableName.ColumnName): ' || vTableName||'.'||vKeyColumnName||CASE WHEN vCurrentCol IS NOT NULL THEN '.'||vCurrentCol||' in pColumnList' ELSE '' END;
        ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, vgMsgTmp);
    WHEN OTHERS THEN
        -- Log complete error details including full stack trace and backtrace
        ENV_MANAGER.LOG_PROCESS_ERROR('Export failed: ' || SQLERRM, vParameters, 'DATA_EXPORTER');
        ENV_MANAGER.LOG_PROCESS_EVENT(ENV_MANAGER.GET_ERROR_STACK(pFormat => 'TABLE', pCode=> SQLCODE), 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_UNKNOWN, ENV_MANAGER.GET_ERROR_STACK(pFormat => 'OUTPUT', pCode=> SQLCODE));

END EXPORT_TABLE_DATA_BY_DATE;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
/**
* @name EXPORT_TABLE_DATA_TO_CSV_BY_DATE
* @desc Exports data to CSV files with date filtering.
*       Like EXPORT_TABLE_DATA_BY_DATE, this procedure creates one file per
*       year/month combination found in the data, but writes CSV files named
*       {base}_YYYYMM.csv instead of Parquet files in PARTITION_* folders.
*       Uses the same date filtering mechanism with CT_ODS.A_LOAD_HISTORY.
*       Allows specifying custom column list or uses T.* if pColumnList is NULL.
*       Validates that all columns in pColumnList exist in the target table.
*       Automatically adds 'T.' prefix to column names in pColumnList.
* @example
* begin
*   DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
*     pSchemaName    => 'CT_MRDS',
*     pTableName     => 'MY_TABLE',
*     pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
*     pBucketArea    => 'DATA',
*     pFolderName    => 'exports',
*     pFileName      => 'my_export.csv',
*     pColumnList    => 'COLUMN1, COLUMN2, COLUMN3', -- Optional
*     pMinDate       => DATE '2024-01-01',
*     pMaxDate       => SYSDATE
*   );
* end;
**/
|
||||
----------------------------------------------------------------------------------------------------
-- EXPORT_TABLE_DATA_TO_CSV_BY_DATE
-- Exports one CSV file per distinct LOAD_START year/month combination found in
-- CT_ODS.A_LOAD_HISTORY for rows joined to pSchemaName.pTableName via pKeyColumnName.
-- File naming pattern: {base}_YYYYMM{ext}, where base/ext come from pFileName
-- (defaulting to TABLENAME / '.csv' when pFileName is NULL).
--
-- In:     pSchemaName      owner of the source table (validated against ALL_TABLES)
--         pTableName       source table name (validated against ALL_TABLES)
--         pKeyColumnName   FK column joined to L.A_ETL_LOAD_SET_KEY (validated)
--         pBucketArea      bucket area key resolved by FILE_MANAGER.GET_BUCKET_URI
--         pFolderName      optional folder prefix inside the bucket (NULL = bucket root)
--         pFileName        optional output file name; extension defaults to '.csv'
--         pColumnList      optional comma-separated column list; NULL lets
--                          processColumnList build the list from table metadata
--         pMinDate/pMaxDate  half-open date window [pMinDate, pMaxDate) on L.LOAD_START
--         pCredentialName  OCI credential used by DBMS_CLOUD.EXPORT_DATA
-- Raises: CODE_TABLE_NOT_EXISTS, CODE_COLUMN_NOT_EXISTS, CODE_UNKNOWN (via handlers)
--
-- FIX (this revision): removed the local vCurrentCol declaration that shadowed the
-- procedure-level variable (the ERR_COLUMN_NOT_EXISTS handler could never report the
-- failing column); removed a dead "SELECT data_type INTO vDataType" lookup whose
-- result was never used, together with the unused vDataType and vColumnName locals.
----------------------------------------------------------------------------------------------------
PROCEDURE EXPORT_TABLE_DATA_TO_CSV_BY_DATE (
    pSchemaName     IN VARCHAR2,
    pTableName      IN VARCHAR2,
    pKeyColumnName  IN VARCHAR2,
    pBucketArea     IN VARCHAR2,
    pFolderName     IN VARCHAR2,
    pFileName       IN VARCHAR2 DEFAULT NULL,
    pColumnList     IN VARCHAR2 default NULL,
    pMinDate        IN DATE default DATE '1900-01-01',
    pMaxDate        IN DATE default SYSDATE,
    pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
)
IS
    -- Type definition for collections of key values
    TYPE key_value_tab IS TABLE OF VARCHAR2(4000);

    vKeyValuesYear       key_value_tab;   -- distinct years found ('YYYY' strings)
    vKeyValuesMonth      key_value_tab;   -- months paired with vKeyValuesYear ('MM' strings)

    vCount               INTEGER;         -- data-dictionary existence counter
    vSql                 VARCHAR2(4000);  -- dynamic year/month discovery query
    vKeyValueYear        VARCHAR2(4000);
    vKeyValueMonth       VARCHAR2(4000);
    vQuery               VARCHAR2(32767); -- dynamic export query passed to DBMS_CLOUD
    vUri                 VARCHAR2(4000);  -- target object-storage URI
    vTableName           VARCHAR2(128);
    vSchemaName          VARCHAR2(128);
    vKeyColumnName       VARCHAR2(128);
    vParameters          CT_MRDS.A_PROCESS_LOG.PROCEDURE_PARAMETERS%TYPE;
    vFileBaseName        VARCHAR2(4000);  -- file name without extension
    vFileExtension       VARCHAR2(10);    -- extension incl. leading dot
    vProcessedColumnList VARCHAR2(32767);
    vBucketUri           VARCHAR2(4000);
    vCurrentCol          VARCHAR2(128);   -- last column checked from pColumnList;
                                          -- read by the ERR_COLUMN_NOT_EXISTS handler

    -- Function to sanitize file names: replaces any character outside
    -- [a-zA-Z0-9._-] with '_' so the value is safe inside an object name.
    FUNCTION sanitizeFilename(pFilename IN VARCHAR2) RETURN VARCHAR2 IS
        vFilename VARCHAR2(1000);
    BEGIN
        vFilename := REGEXP_REPLACE(pFilename, '[^a-zA-Z0-9._-]', '_');
        RETURN vFilename;
    END sanitizeFilename;

BEGIN
    vParameters := ENV_MANAGER.FORMAT_PARAMETERS(SYS.ODCIVARCHAR2LIST( 'pSchemaName => '''||nvl(pSchemaName, 'NULL')||''''
                                                                      ,'pTableName => '''||nvl(pTableName, 'NULL')||''''
                                                                      ,'pKeyColumnName => '''||nvl(pKeyColumnName, 'NULL')||''''
                                                                      ,'pBucketArea => '''||nvl(pBucketArea, 'NULL')||''''
                                                                      ,'pFolderName => '''||nvl(pFolderName, 'NULL')||''''
                                                                      ,'pFileName => '''||nvl(pFileName, 'NULL')||''''
                                                                      ,'pColumnList => '''||nvl(pColumnList, 'NULL')||''''
                                                                      ,'pMinDate => '''||nvl(TO_CHAR(pMinDate, 'YYYY-MM-DD HH24:MI:SS'), 'NULL')||''''
                                                                      ,'pMaxDate => '''||nvl(TO_CHAR(pMaxDate, 'YYYY-MM-DD HH24:MI:SS'), 'NULL')||''''
                                                                      ,'pCredentialName => '''||nvl(pCredentialName, 'NULL')||''''
                                                                      ));
    ENV_MANAGER.LOG_PROCESS_EVENT('Start','INFO', vParameters);

    -- Get bucket URI based on bucket area using FILE_MANAGER function
    vBucketUri := FILE_MANAGER.GET_BUCKET_URI(pBucketArea);

    -- Convert table and column names to uppercase to match data dictionary
    vTableName     := UPPER(pTableName);
    vSchemaName    := UPPER(pSchemaName);
    vKeyColumnName := UPPER(pKeyColumnName);

    -- Extract base filename and extension, or construct default filename
    IF pFileName IS NOT NULL THEN
        -- Use provided filename; split on the LAST dot so 'a.b.csv' keeps 'a.b'
        IF INSTR(pFileName, '.') > 0 THEN
            vFileBaseName  := SUBSTR(pFileName, 1, INSTR(pFileName, '.', -1) - 1);
            vFileExtension := SUBSTR(pFileName, INSTR(pFileName, '.', -1));
        ELSE
            vFileBaseName  := pFileName;
            vFileExtension := '.csv';
        END IF;
    ELSE
        -- Construct default filename: TABLENAME.csv (without date range)
        vFileBaseName  := UPPER(pTableName);
        vFileExtension := '.csv';
    END IF;

    -- Check if table exists
    SELECT COUNT(*) INTO vCount
    FROM all_tables
    WHERE table_name = vTableName
    AND owner = vSchemaName;

    IF vCount = 0 THEN
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_TABLE_NOT_EXISTS, ENV_MANAGER.MSG_TABLE_NOT_EXISTS);
    END IF;

    -- Check if key column exists
    SELECT COUNT(*) INTO vCount
    FROM all_tab_columns
    WHERE table_name = vTableName
    AND column_name = vKeyColumnName
    AND owner = vSchemaName;

    IF vCount = 0 THEN
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, ENV_MANAGER.MSG_COLUMN_NOT_EXISTS);
    END IF;

    -- Validate pColumnList - check if all column names exist in the table.
    -- vCurrentCol is deliberately the procedure-level variable (no local
    -- shadow), so the exception handler can name the offending column.
    IF pColumnList IS NOT NULL THEN
        DECLARE
            vColumns VARCHAR2(32767);
            vPos     PLS_INTEGER;
            vNextPos PLS_INTEGER;
        BEGIN
            -- Remove spaces and convert to uppercase for processing
            vColumns := UPPER(REPLACE(pColumnList, ' ', ''));
            vPos := 1;

            -- Parse comma-separated column list
            WHILE vPos <= LENGTH(vColumns) LOOP
                vNextPos := INSTR(vColumns, ',', vPos);
                IF vNextPos = 0 THEN
                    vNextPos := LENGTH(vColumns) + 1;
                END IF;

                vCurrentCol := SUBSTR(vColumns, vPos, vNextPos - vPos);

                -- Remove table alias prefix if present ('T.COLUMN_NAME' -> 'COLUMN_NAME')
                IF INSTR(vCurrentCol, '.') > 0 THEN
                    vCurrentCol := SUBSTR(vCurrentCol, INSTR(vCurrentCol, '.') + 1);
                END IF;

                -- Check if column exists in the table
                SELECT COUNT(*) INTO vCount
                FROM all_tab_columns
                WHERE table_name = vTableName
                AND column_name = vCurrentCol
                AND owner = vSchemaName;

                IF vCount = 0 THEN
                    RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, ENV_MANAGER.MSG_COLUMN_NOT_EXISTS);
                END IF;

                vPos := vNextPos + 1;
            END LOOP;
        END;
    END IF;

    -- Process column list to add T. prefix to each column
    vProcessedColumnList := processColumnList(pColumnList, vTableName, vSchemaName, vKeyColumnName);

    ENV_MANAGER.LOG_PROCESS_EVENT('Input column list: ' || NVL(pColumnList, 'NULL (using dynamic column list)'), 'DEBUG', vParameters);
    ENV_MANAGER.LOG_PROCESS_EVENT('Processed column list: ' || vProcessedColumnList, 'DEBUG', vParameters);

    -- (The previous revision fetched the key column's data type here, but the
    -- value was never used; the dead dictionary lookup has been removed.)

    -- DBMS_ASSERT guards the identifiers that get concatenated into dynamic SQL
    vTableName := DBMS_ASSERT.SCHEMA_NAME(vSchemaName) || '.' || DBMS_ASSERT.simple_sql_name(vTableName);

    -- Fetch unique year/month combinations inside the half-open window
    vSql := 'SELECT DISTINCT TO_CHAR(L.LOAD_START,''YYYY'') AS YR, TO_CHAR(L.LOAD_START,''MM'') AS MN
             FROM ' || vTableName || ' T, CT_ODS.A_LOAD_HISTORY L
             WHERE T.' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = L.A_ETL_LOAD_SET_KEY
             AND L.LOAD_START >= :pMinDate
             AND L.LOAD_START < :pMaxDate
             ' ;
    ENV_MANAGER.LOG_PROCESS_EVENT('Executing date range query: ' || vSql, 'DEBUG', vParameters);
    EXECUTE IMMEDIATE vSql BULK COLLECT INTO vKeyValuesYear, vKeyValuesMonth USING pMinDate, pMaxDate;

    ENV_MANAGER.LOG_PROCESS_EVENT('Found ' || vKeyValuesYear.COUNT || ' year/month combinations to export', 'INFO', vParameters);
    ENV_MANAGER.LOG_PROCESS_EVENT('Date range: ' || TO_CHAR(pMinDate, 'YYYY-MM-DD HH24:MI:SS') || ' to ' || TO_CHAR(pMaxDate, 'YYYY-MM-DD HH24:MI:SS'), 'DEBUG', vParameters);

    -- Loop over each unique year/month combination
    FOR i IN 1 .. vKeyValuesYear.COUNT LOOP
        vKeyValueYear  := vKeyValuesYear(i);
        vKeyValueMonth := vKeyValuesMonth(i);

        -- Construct the query to extract data for the current year/month
        vQuery := 'SELECT ' || vProcessedColumnList || '
                   FROM ' || vTableName || ' T, CT_ODS.A_LOAD_HISTORY L
                   WHERE T.' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = L.A_ETL_LOAD_SET_KEY
                   AND TO_CHAR(L.LOAD_START,''YYYY'') = ' || CHR(39) || vKeyValueYear || CHR(39) || '
                   AND TO_CHAR(L.LOAD_START,''MM'') = ' || CHR(39) || vKeyValueMonth || CHR(39) || '
                   AND L.LOAD_START >= TO_DATE(' || CHR(39) || TO_CHAR(pMinDate, 'YYYY-MM-DD HH24:MI:SS') || CHR(39) || ', ''YYYY-MM-DD HH24:MI:SS'')
                   AND L.LOAD_START < TO_DATE(' || CHR(39) || TO_CHAR(pMaxDate, 'YYYY-MM-DD HH24:MI:SS') || CHR(39) || ', ''YYYY-MM-DD HH24:MI:SS'')';

        -- Construct the URI for the CSV file: {folder/}{base}_YYYYMM{ext}
        vUri := vBucketUri ||
                CASE WHEN pFolderName IS NOT NULL THEN pFolderName || '/' ELSE '' END ||
                sanitizeFilename(vFileBaseName) || '_' ||
                sanitizeFilename(vKeyValueYear) || sanitizeFilename(vKeyValueMonth) ||
                vFileExtension;

        ENV_MANAGER.LOG_PROCESS_EVENT('Exporting to CSV file: ' || vUri, 'INFO', vParameters);
        ENV_MANAGER.LOG_PROCESS_EVENT('Processing Year/Month: ' || vKeyValueYear || '/' || vKeyValueMonth || ' (' || i || '/' || vKeyValuesYear.COUNT || ')', 'DEBUG', vParameters);
        ENV_MANAGER.LOG_PROCESS_EVENT('Export query: ' || vQuery, 'DEBUG', vParameters);
        ENV_MANAGER.LOG_PROCESS_EVENT('File name pattern: ' || vFileBaseName || '_' || vKeyValueYear || vKeyValueMonth || vFileExtension, 'DEBUG', vParameters);

        -- Use DBMS_CLOUD package to export data to CSV file
        DBMS_CLOUD.EXPORT_DATA(
            credential_name => pCredentialName,
            file_uri_list   => vUri,
            query           => vQuery,
            format          => json_object('type' VALUE 'CSV', 'header' VALUE true)
        );
    END LOOP;

    ENV_MANAGER.LOG_PROCESS_EVENT('Export completed successfully for ' || vKeyValuesYear.COUNT || ' files', 'INFO', vParameters);
    ENV_MANAGER.LOG_PROCESS_EVENT('End','INFO',vParameters);

EXCEPTION
    WHEN ENV_MANAGER.ERR_TABLE_NOT_EXISTS THEN
        vgMsgTmp := ENV_MANAGER.MSG_TABLE_NOT_EXISTS ||': '||vTableName;
        ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_TABLE_NOT_EXISTS, vgMsgTmp);
    WHEN ENV_MANAGER.ERR_COLUMN_NOT_EXISTS THEN
        vgMsgTmp := ENV_MANAGER.MSG_COLUMN_NOT_EXISTS || ' (TableName.ColumnName): ' || vTableName||'.'||vKeyColumnName||CASE WHEN vCurrentCol IS NOT NULL THEN '.'||vCurrentCol||' in pColumnList' ELSE '' END;
        ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, vgMsgTmp);
    WHEN OTHERS THEN
        -- Log complete error details including full stack trace and backtrace
        ENV_MANAGER.LOG_PROCESS_ERROR('Export failed: ' || SQLERRM, vParameters, 'DATA_EXPORTER');
        ENV_MANAGER.LOG_PROCESS_EVENT(ENV_MANAGER.GET_ERROR_STACK(pFormat => 'TABLE', pCode=> SQLCODE), 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_UNKNOWN, ENV_MANAGER.GET_ERROR_STACK(pFormat => 'OUTPUT', pCode=> SQLCODE));

END EXPORT_TABLE_DATA_TO_CSV_BY_DATE;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
-- VERSION MANAGEMENT FUNCTIONS
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
-- Returns the current package version string (X.Y.Z) as declared in the
-- package specification.
FUNCTION GET_VERSION RETURN VARCHAR2
IS
BEGIN
    -- Simply surface the spec-level constant; no state is touched.
    RETURN PACKAGE_VERSION;
END GET_VERSION;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
-- Returns formatted build information (package name, version, build date,
-- author), delegating the formatting to ENV_MANAGER.
FUNCTION GET_BUILD_INFO RETURN VARCHAR2
IS
    vInfo VARCHAR2(32767);  -- formatted build details from ENV_MANAGER
BEGIN
    vInfo := ENV_MANAGER.GET_PACKAGE_VERSION_INFO(
                 pPackageName => 'DATA_EXPORTER',
                 pVersion     => PACKAGE_VERSION,
                 pBuildDate   => PACKAGE_BUILD_DATE,
                 pAuthor      => PACKAGE_AUTHOR
             );
    RETURN vInfo;
END GET_BUILD_INFO;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
-- Returns the package's recent change log, formatted by ENV_MANAGER from the
-- VERSION_HISTORY constant declared in the specification.
FUNCTION GET_VERSION_HISTORY RETURN VARCHAR2
IS
BEGIN
    RETURN ENV_MANAGER.FORMAT_VERSION_HISTORY(
               pPackageName    => 'DATA_EXPORTER',
               pVersionHistory => VERSION_HISTORY
           );
END GET_VERSION_HISTORY;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
END;
|
||||
|
||||
/
|
||||
@@ -0,0 +1,166 @@
|
||||
create or replace PACKAGE CT_MRDS.DATA_EXPORTER
AUTHID CURRENT_USER
AS
/**
* Data Export Package: Provides comprehensive data export capabilities to various formats (CSV, Parquet)
* with support for cloud storage integration via Oracle Cloud Infrastructure (OCI).
* The structure of comment is used by GET_PACKAGE_DOCUMENTATION function
* which returns documentation text for confluence page (to Copy-Paste it).
**/

    -- Package Version Information
    PACKAGE_VERSION     CONSTANT VARCHAR2(10) := '2.1.1';
    PACKAGE_BUILD_DATE  CONSTANT VARCHAR2(19) := '2025-12-04 13:10:00';
    PACKAGE_AUTHOR      CONSTANT VARCHAR2(50) := 'MRDS Development Team';

    -- Version History (last 3-5 changes)
    -- NOTE: the previous revision listed v2.1.1 twice; the redundant shorter
    -- entry has been merged into the single detailed line below.
    VERSION_HISTORY CONSTANT VARCHAR2(4000) :=
        'v2.1.1 (2025-12-04): Fixed JOIN column reference A_WORKFLOW_HISTORY_KEY -> A_ETL_LOAD_SET_KEY, added consistent column mapping and dynamic column list to EXPORT_TABLE_DATA procedure, enhanced DEBUG logging for all export operations' || CHR(10) ||
        'v2.1.0 (2025-10-22): Added version tracking and PARTITION_YEAR/PARTITION_MONTH support' || CHR(10) ||
        'v2.0.0 (2025-10-01): Separated export functionality from FILE_MANAGER package' || CHR(10) ||
        'v1.0.0 (2025-09-15): Initial implementation within FILE_MANAGER package' || CHR(10);

    cgBL     CONSTANT VARCHAR2(2) := CHR(13)||CHR(10);  -- CRLF line break used in messages
    vgMsgTmp VARCHAR2(32000);                           -- scratch buffer for building error messages

---------------------------------------------------------------------------------------------------------------------------
---------------------------------------------------------------------------------------------------------------------------

/**
* @name EXPORT_TABLE_DATA
* @desc Wrapper procedure for DBMS_CLOUD.EXPORT_DATA.
*       Exports data into CSV file on OCI infrastructure.
*       pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
* @example
* begin
*   DATA_EXPORTER.EXPORT_TABLE_DATA(
*     pSchemaName    => 'CT_MRDS',
*     pTableName     => 'MY_TABLE',
*     pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
*     pBucketArea    => 'DATA',
*     pFolderName    => 'csv_exports'
*   );
* end;
**/
    PROCEDURE EXPORT_TABLE_DATA (
        pSchemaName     IN VARCHAR2,
        pTableName      IN VARCHAR2,
        pKeyColumnName  IN VARCHAR2,
        pBucketArea     IN VARCHAR2,
        pFolderName     IN VARCHAR2,
        pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
    );



/**
* @name EXPORT_TABLE_DATA_BY_DATE
* @desc Wrapper procedure for DBMS_CLOUD.EXPORT_DATA.
*       Exports data into PARQUET files on OCI infrastructure.
*       Each YEAR_MONTH pair goes to separate file (implicit partitioning).
*       Allows specifying custom column list or uses T.* if pColumnList is NULL.
*       Validates that all columns in pColumnList exist in the target table.
*       Automatically adds 'T.' prefix to column names in pColumnList.
*       pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
* @example
* begin
*   DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
*     pSchemaName    => 'CT_MRDS',
*     pTableName     => 'MY_TABLE',
*     pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
*     pBucketArea    => 'DATA',
*     pFolderName    => 'parquet_exports',
*     pColumnList    => 'COLUMN1, COLUMN2, COLUMN3', -- Optional
*     pMinDate       => DATE '2024-01-01',
*     pMaxDate       => SYSDATE
*   );
* end;
**/
    PROCEDURE EXPORT_TABLE_DATA_BY_DATE (
        pSchemaName     IN VARCHAR2,
        pTableName      IN VARCHAR2,
        pKeyColumnName  IN VARCHAR2,
        pBucketArea     IN VARCHAR2,
        pFolderName     IN VARCHAR2,
        pColumnList     IN VARCHAR2 default NULL,
        pMinDate        IN DATE default DATE '1900-01-01',
        pMaxDate        IN DATE default SYSDATE,
        pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
    );



/**
* @name EXPORT_TABLE_DATA_TO_CSV_BY_DATE
* @desc Exports data to separate CSV files partitioned by year and month.
*       Creates one CSV file for each year/month combination found in the data.
*       Uses the same date filtering mechanism with CT_ODS.A_LOAD_HISTORY as EXPORT_TABLE_DATA_BY_DATE,
*       but exports to CSV format instead of Parquet.
*       File naming pattern: {pFileName}_YYYYMM.csv or {TABLENAME}_YYYYMM.csv (if pFileName is NULL)
* @example
* begin
*   -- With custom filename
*   DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
*     pSchemaName    => 'CT_MRDS',
*     pTableName     => 'MY_TABLE',
*     pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
*     pBucketArea    => 'DATA',
*     pFolderName    => 'exports',
*     pFileName      => 'my_export.csv',
*     pMinDate       => DATE '2024-01-01',
*     pMaxDate       => SYSDATE
*   );
*
*   -- With auto-generated filename (based on table name only)
*   DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
*     pSchemaName    => 'OU_TOP',
*     pTableName     => 'AGGREGATED_ALLOTMENT',
*     pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
*     pBucketArea    => 'ARCHIVE',
*     pFolderName    => 'exports',
*     pMinDate       => DATE '2025-09-01',
*     pMaxDate       => DATE '2025-09-17'
*   );
*   -- This will create files like: AGGREGATED_ALLOTMENT_202509.csv, etc.
*   pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
* end;
**/
    PROCEDURE EXPORT_TABLE_DATA_TO_CSV_BY_DATE (
        pSchemaName     IN VARCHAR2,
        pTableName      IN VARCHAR2,
        pKeyColumnName  IN VARCHAR2,
        pBucketArea     IN VARCHAR2,
        pFolderName     IN VARCHAR2,
        pFileName       IN VARCHAR2 DEFAULT NULL,
        pColumnList     IN VARCHAR2 default NULL,
        pMinDate        IN DATE default DATE '1900-01-01',
        pMaxDate        IN DATE default SYSDATE,
        pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
    );

---------------------------------------------------------------------------------------------------------------------------
-- VERSION MANAGEMENT FUNCTIONS
---------------------------------------------------------------------------------------------------------------------------

/**
* Returns the current package version number
* return: Version string in format X.Y.Z (e.g., '2.1.0')
**/
    FUNCTION GET_VERSION RETURN VARCHAR2;

/**
* Returns comprehensive build information including version, date, and author
* return: Formatted string with complete build details
**/
    FUNCTION GET_BUILD_INFO RETURN VARCHAR2;

/**
* Returns the version history with recent changes
* return: Multi-line string with version history
**/
    FUNCTION GET_VERSION_HISTORY RETURN VARCHAR2;

END;

/
|
||||
@@ -0,0 +1,91 @@
|
||||
-- ============================================================================
-- MARS-826-PREHOOK Master Rollback Script
-- ============================================================================
-- Purpose: Rollback DATA_EXPORTER package to previous version
-- Target Schema: CT_MRDS
-- WARNING: Only execute if you need to revert to A_WORKFLOW_HISTORY_KEY
-- ============================================================================

SET SERVEROUTPUT ON SIZE UNLIMITED
SET VERIFY OFF
SET FEEDBACK ON
SET ECHO OFF

-- Create log directory if it doesn't exist.
-- NOTE(review): "2>nul" silences the "already exists" error on Windows only;
-- on a Unix shell it creates a stray file literally named "nul" in the
-- working directory. Confirm the deployment host is Windows, or adjust.
host mkdir log 2>nul

-- Generate dynamic SPOOL filename with timestamp.
-- The name is built in PL/SQL (bind variable :filename), then surfaced to
-- SQL*Plus via COLUMN ... new_value so it can be used as &_filename below.
var filename VARCHAR2(100)
BEGIN
    :filename := 'log/ROLLBACK_MARS_826_PREHOOK_' || SYS_CONTEXT('USERENV', 'CON_NAME') || '_' || TO_CHAR(SYSDATE,'YYYYMMDD_HH24MISS') || '.log';
END;
/
column filename new_value _filename
select :filename filename from dual;
spool &_filename

PROMPT
PROMPT ============================================================================
PROMPT MARS-826-PREHOOK Rollback Starting
PROMPT ============================================================================
PROMPT WARNING: This will restore DATA_EXPORTER to use A_WORKFLOW_HISTORY_KEY
PROMPT
PROMPT CRITICAL PREREQUISITES:
PROMPT 1. CT_ODS.A_LOAD_HISTORY must have column A_WORKFLOW_HISTORY_KEY
PROMPT    (if column is A_ETL_LOAD_SET_KEY, rollback will fail)
PROMPT 2. No active MARS-826 exports should be running
PROMPT
PROMPT Timestamp:
SELECT TO_CHAR(SYSDATE, 'YYYY-MM-DD HH24:MI:SS') AS rollback_start FROM DUAL;
PROMPT ============================================================================

-- Confirm rollback with user. The PL/SQL guard below raises (and, thanks to
-- WHENEVER SQLERROR EXIT, terminates the session) unless the answer is
-- exactly YES (case-insensitive, surrounding whitespace ignored).
ACCEPT continue CHAR PROMPT 'Type YES to continue with rollback, or Ctrl+C to abort: '
WHENEVER SQLERROR EXIT SQL.SQLCODE
BEGIN
    IF '&continue' IS NULL OR TRIM('&continue') IS NULL OR UPPER(TRIM('&continue')) != 'YES' THEN
        RAISE_APPLICATION_ERROR(-20001, 'Rollback aborted by user');
    END IF;
END;
/
WHENEVER SQLERROR CONTINUE

-- Rollback steps (SPEC first, then BODY - standard Oracle deployment order)
PROMPT
PROMPT Step 1/2: Restoring Previous DATA_EXPORTER Package Specification
PROMPT =================================================================
@@91_MARS_826_PREHOOK_rollback_DATA_EXPORTER_SPEC.sql

PROMPT
PROMPT Step 2/2: Restoring Previous DATA_EXPORTER Package Body
PROMPT ========================================================
@@92_MARS_826_PREHOOK_rollback_DATA_EXPORTER_BODY.sql

-- Verify rollback: both PACKAGE and PACKAGE BODY should report VALID here.
PROMPT
PROMPT Verification: Package Compilation Status
PROMPT =========================================
SELECT object_name, object_type, status, last_ddl_time
FROM all_objects
WHERE owner = 'CT_MRDS'
AND object_name = 'DATA_EXPORTER'
AND object_type IN ('PACKAGE', 'PACKAGE BODY')
ORDER BY object_type;

PROMPT
PROMPT ============================================================================
PROMPT MARS-826-PREHOOK Rollback Completed
PROMPT ============================================================================
PROMPT Completion Time:
SELECT TO_CHAR(SYSDATE, 'YYYY-MM-DD HH24:MI:SS') AS rollback_end FROM DUAL;
PROMPT
PROMPT Rollback Summary:
PROMPT - Package Specification and Body: CT_MRDS.DATA_EXPORTER
PROMPT - Restored Version: Previous (with A_WORKFLOW_HISTORY_KEY references)
PROMPT
PROMPT Log file: &_filename
PROMPT ============================================================================

spool off

quit;
|
||||
@@ -0,0 +1,96 @@
|
||||
-- ===================================================================
-- Simple Package Version Tracking Script
-- ===================================================================
-- Purpose: Track specified Oracle package versions
-- Author: Grzegorz Michalski
-- Date: 2025-12-04
-- Version: 3.1.0 - List-Based Edition
--
-- USAGE:
--   1. Edit package list below (add/remove packages as needed)
--   2. Include in your install/rollback script: @@track_package_versions.sql
-- ===================================================================

SET SERVEROUTPUT ON;

DECLARE
    -- One row of tracked-package metadata (filled only on success).
    TYPE t_package_rec IS RECORD (
        owner        VARCHAR2(50),
        package_name VARCHAR2(50),
        version      VARCHAR2(50)
    );
    TYPE t_packages     IS TABLE OF t_package_rec;
    TYPE t_string_array IS TABLE OF VARCHAR2(100);

    -- ===================================================================
    -- PACKAGE LIST - Edit this array to specify packages to track
    -- ===================================================================
    -- Add or remove entries as needed for your MARS issue
    -- Format: 'SCHEMA.PACKAGE_NAME'
    -- ===================================================================
    vPackageList t_string_array := t_string_array(
        'CT_MRDS.DATA_EXPORTER'
    );
    -- ===================================================================

    vPackages    t_packages := t_packages();   -- successfully tracked packages
    vVersion     VARCHAR2(50);
    vCount       NUMBER := 0;                  -- how many were tracked in ENV_MANAGER
    vOwner       VARCHAR2(50);
    vPackageName VARCHAR2(50);
    vDotPos      NUMBER;
BEGIN
    DBMS_OUTPUT.PUT_LINE('========================================');
    DBMS_OUTPUT.PUT_LINE('Package Version Tracking');
    DBMS_OUTPUT.PUT_LINE('========================================');

    -- Process each package in the list
    FOR i IN 1..vPackageList.COUNT LOOP
        vDotPos := INSTR(vPackageList(i), '.');
        IF vDotPos > 0 THEN
            vOwner       := SUBSTR(vPackageList(i), 1, vDotPos - 1);
            vPackageName := SUBSTR(vPackageList(i), vDotPos + 1);

            -- Get package version via its own GET_VERSION() function.
            BEGIN
                -- DBMS_ASSERT.SIMPLE_SQL_NAME validates each identifier before
                -- it is embedded in dynamic SQL; an invalid entry raises
                -- ORA-44003 and is reported/skipped by the handler below.
                EXECUTE IMMEDIATE
                    'SELECT '
                    || DBMS_ASSERT.SIMPLE_SQL_NAME(vOwner) || '.'
                    || DBMS_ASSERT.SIMPLE_SQL_NAME(vPackageName)
                    || '.GET_VERSION() FROM DUAL'
                    INTO vVersion;
                vPackages.EXTEND;
                vPackages(vPackages.COUNT).owner        := vOwner;
                vPackages(vPackages.COUNT).package_name := vPackageName;
                vPackages(vPackages.COUNT).version      := vVersion;

                -- Track in ENV_MANAGER (best effort: failure here must not
                -- stop the deployment, so the handler only swallows).
                BEGIN
                    CT_MRDS.ENV_MANAGER.TRACK_PACKAGE_VERSION(
                        pPackageOwner     => vOwner,
                        pPackageName      => vPackageName,
                        pPackageVersion   => vVersion,
                        pPackageBuildDate => TO_CHAR(SYSDATE, 'YYYY-MM-DD HH24:MI:SS'),
                        pPackageAuthor    => 'Grzegorz Michalski'
                    );
                    vCount := vCount + 1;
                EXCEPTION
                    WHEN OTHERS THEN NULL; -- Continue even if tracking fails
                END;
            EXCEPTION
                WHEN OTHERS THEN
                    -- Report and continue: one bad entry must not stop the run.
                    DBMS_OUTPUT.PUT_LINE('SKIPPED: ' || vPackageList(i) || ' - ' || SQLERRM);
            END;
        ELSE
            -- Entry without a SCHEMA.PACKAGE separator is malformed.
            DBMS_OUTPUT.PUT_LINE('SKIPPED: ' || vPackageList(i) || ' - invalid format (expected SCHEMA.PACKAGE_NAME)');
        END IF;
    END LOOP;

    DBMS_OUTPUT.PUT_LINE('');
    DBMS_OUTPUT.PUT_LINE('Summary:');
    DBMS_OUTPUT.PUT_LINE('--------');
    DBMS_OUTPUT.PUT_LINE('Packages tracked: ' || vCount || '/' || vPackageList.COUNT);

    IF vPackages.COUNT > 0 THEN
        DBMS_OUTPUT.PUT_LINE('');
        DBMS_OUTPUT.PUT_LINE('Tracked Packages:');
        FOR i IN 1..vPackages.COUNT LOOP
            DBMS_OUTPUT.PUT_LINE('  ' || vPackages(i).owner || '.' || vPackages(i).package_name || ' v' || vPackages(i).version);
        END LOOP;
    END IF;

    DBMS_OUTPUT.PUT_LINE('========================================');
END;
/
|
||||
@@ -0,0 +1,62 @@
|
||||
-- ===================================================================
-- Universal Package Version Verification Script
-- ===================================================================
-- Purpose: Verify all tracked Oracle packages for code changes
-- Author: Grzegorz Michalski
-- Date: 2025-12-04
-- Version: 1.0.0
--
-- USAGE:
--   Include at the end of install/rollback scripts: @@verify_packages_version.sql
--
-- OUTPUT:
--   - List of all tracked packages with their current status
--   - OK: Package has not changed since last tracking
--   - WARNING: Package code changed without version update
-- ===================================================================

-- Wide line/page settings so the 80-char STATUS column is not wrapped.
SET LINESIZE 200
SET PAGESIZE 1000
SET FEEDBACK OFF

PROMPT
PROMPT ========================================
PROMPT Package Version Verification
PROMPT ========================================
PROMPT

COLUMN PACKAGE_OWNER FORMAT A15
COLUMN PACKAGE_NAME FORMAT A20
COLUMN VERSION FORMAT A10
COLUMN STATUS FORMAT A80

-- Pick the latest tracking row per package (ROW_NUMBER ... RN = 1), then ask
-- ENV_MANAGER whether the package source has changed since that row was
-- recorded. Result is one status line per tracked package.
SELECT
    PACKAGE_OWNER,
    PACKAGE_NAME,
    PACKAGE_VERSION AS VERSION,
    CT_MRDS.ENV_MANAGER.CHECK_PACKAGE_CHANGES(PACKAGE_OWNER, PACKAGE_NAME) AS STATUS
FROM (
    SELECT
        PACKAGE_OWNER,
        PACKAGE_NAME,
        PACKAGE_VERSION,
        ROW_NUMBER() OVER (PARTITION BY PACKAGE_OWNER, PACKAGE_NAME ORDER BY TRACKING_DATE DESC) AS RN
    FROM CT_MRDS.A_PACKAGE_VERSION_TRACKING
)
WHERE RN = 1
ORDER BY PACKAGE_OWNER, PACKAGE_NAME;

PROMPT
PROMPT ========================================
PROMPT Verification Complete
PROMPT ========================================
PROMPT
PROMPT Legend:
PROMPT OK - Package has not changed since last tracking
PROMPT WARNING - Package code changed without version update
PROMPT
PROMPT For detailed hash information, use:
PROMPT SELECT ENV_MANAGER.GET_PACKAGE_HASH_INFO('OWNER', 'PACKAGE') FROM DUAL;
PROMPT ========================================

-- Restore the default so subsequent scripts see row-count feedback again.
SET FEEDBACK ON
|
||||
6
MARS_Packages/REL01_POST_DEACTIVATION/MARS-826/.gitignore
vendored
Normal file
6
MARS_Packages/REL01_POST_DEACTIVATION/MARS-826/.gitignore
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
# Exclude temporary folders from version control
|
||||
confluence/
|
||||
log/
|
||||
test/
|
||||
mock_data/
|
||||
|
||||
@@ -0,0 +1,105 @@
|
||||
-- =====================================================================================
-- Script: 00_MARS_826_pre_check_existing_files.sql
-- Purpose: Display existing archive files in HIST bucket before export
-- Author: Grzegorz Michalski
-- Created: 2025-12-02
-- MARS Issue: MARS-826
-- Target Location: mrds_hist_dev/ARCHIVE/LM/
-- =====================================================================================

SET SERVEROUTPUT ON SIZE UNLIMITED;
SET FEEDBACK ON;
SET VERIFY OFF;
SET LINESIZE 200;

PROMPT =====================================================================================
PROMPT MARS-826 Pre-Check: Listing existing archive files in HIST bucket
PROMPT =====================================================================================

DECLARE
    vBucketUri      VARCHAR2(500);   -- base URI of the ARCHIVE bucket
    vCredentialName VARCHAR2(100);   -- DBMS_CLOUD credential used for listing
    vFileCount      NUMBER := 0;     -- parquet files found in the current folder
    vTotalFiles     NUMBER := 0;     -- parquet files found across all folders

    TYPE t_folder_list IS TABLE OF VARCHAR2(200);
    vFolders t_folder_list;
BEGIN
    -- Get bucket URI and credential from FILE_MANAGER configuration
    -- NOTE(review): the concatenation below assumes GET_BUCKET_URI returns a
    -- URI with a trailing slash (folder paths do not start with one) — confirm.
    vBucketUri      := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
    vCredentialName := CT_MRDS.ENV_MANAGER.gvCredentialName;

    DBMS_OUTPUT.PUT_LINE('CHECK TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
    DBMS_OUTPUT.PUT_LINE('Bucket URI: ' || vBucketUri);
    DBMS_OUTPUT.PUT_LINE('Credential: ' || vCredentialName);
    DBMS_OUTPUT.PUT_LINE('');

    -- Initialize folder list for all 19 tables
    -- (one target folder per exported table; see export steps 01-07).
    vFolders := t_folder_list(
        'ARCHIVE/LM/LM_ADHOC_ADJUSTMENTS_HEADER/',
        'ARCHIVE/LM/LM_ADHOC_ADJUSTMENTS_ITEM/',
        'ARCHIVE/LM/LM_ADHOC_ADJUSTMENTS_ITEM_HEADER/',
        'ARCHIVE/LM/LM_BALANCESHEET_HEADER/',
        'ARCHIVE/LM/LM_BALANCESHEET_ITEM/',
        'ARCHIVE/LM/LM_CSM_ADJUSTMENTS_HEADER/',
        'ARCHIVE/LM/LM_CSM_ADJUSTMENTS_ITEM/',
        'ARCHIVE/LM/LM_CSM_ADJUSTMENTS_ITEM_HEADER/',
        'ARCHIVE/LM/LM_STANDING_FACILITIES/',
        'ARCHIVE/LM/LM_STANDING_FACILITIES_HEADER/',
        'ARCHIVE/LM/LM_CURRENT_ACCOUNTS_HEADER/',
        'ARCHIVE/LM/LM_CURRENT_ACCOUNTS_ITEM/',
        'ARCHIVE/LM/LM_FORECAST_HEADER/',
        'ARCHIVE/LM/LM_FORECAST_ITEM/',
        'ARCHIVE/LM/LM_QRE_ADJUSTMENTS_HEADER/',
        'ARCHIVE/LM/LM_QRE_ADJUSTMENTS_ITEM/',
        'ARCHIVE/LM/LM_QRE_ADJUSTMENTS_ITEM_HEADER/',
        'ARCHIVE/LM/LM_TTS_HEADER/',
        'ARCHIVE/LM/LM_TTS_ITEM/'
    );

    -- Check each folder
    FOR i IN 1..vFolders.COUNT LOOP
        vFileCount := 0;

        -- List all Parquet files in the folder. The folder heading is only
        -- printed on the first match, so empty folders produce no output.
        FOR rec IN (
            SELECT object_name, bytes
            FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
                credential_name => vCredentialName,
                location_uri    => vBucketUri || vFolders(i)
            ))
            WHERE object_name LIKE '%.parquet'
            ORDER BY object_name
        ) LOOP
            IF vFileCount = 0 THEN
                DBMS_OUTPUT.PUT_LINE('Folder: ' || vFolders(i));
            END IF;

            vFileCount := vFileCount + 1;
            DBMS_OUTPUT.PUT_LINE('  [' || vFileCount || '] ' || rec.object_name || ' (' ||
                                 ROUND(rec.bytes/1024/1024, 2) || ' MB)');
        END LOOP;

        IF vFileCount > 0 THEN
            DBMS_OUTPUT.PUT_LINE('Folder summary: ' || vFileCount || ' files found');
            DBMS_OUTPUT.PUT_LINE('');
            vTotalFiles := vTotalFiles + vFileCount;
        END IF;
    END LOOP;

    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    DBMS_OUTPUT.PUT_LINE('PRE-CHECK COMPLETE: Total existing files: ' || vTotalFiles);
    IF vTotalFiles = 0 THEN
        DBMS_OUTPUT.PUT_LINE('STATUS: No existing archive files found - ready for initial export');
    ELSE
        DBMS_OUTPUT.PUT_LINE('WARNING: Existing archive files found - export will add new files or overwrite existing ones');
    END IF;
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');

EXCEPTION
    WHEN OTHERS THEN
        -- Surface the failure (e.g. bad credential or unreachable bucket)
        -- and re-raise so the calling deployment run stops here.
        DBMS_OUTPUT.PUT_LINE('');
        DBMS_OUTPUT.PUT_LINE('ERROR: ' || SQLERRM);
        RAISE;
END;
/
|
||||
@@ -0,0 +1,76 @@
|
||||
-- ============================================================================
-- MARS-826 Export Step 01: ADHOC_ADJ Tables
-- ============================================================================
-- Purpose: Export ADHOC_ADJ table group to HIST bucket
-- Author: Grzegorz Michalski
-- Tables: 3 tables (~209 records total)
-- Target: mrds_hist_dev/ARCHIVE/LM/LM_ADHOC_ADJUSTMENTS_*
--
-- Each table is exported in its own anonymous block so a failure is logged
-- with context and then re-RAISEd, letting the calling master script abort.
-- NOTE(review): pKeyColumnName is A_ETL_LOAD_SET_KEY_FK for this group;
-- other groups use A_ETL_LOAD_SET_KEY / A_ETL_LOAD_SET_FK — confirm against
-- each table's definition before changing.
-- ============================================================================

SET SERVEROUTPUT ON SIZE UNLIMITED

PROMPT
PROMPT ============================================================================
PROMPT Step 01: Exporting ADHOC_ADJ Tables (3 tables)
PROMPT ============================================================================

-- Table 1: LEGACY_ADHOC_ADJ_HEADER (~5 records)
PROMPT
PROMPT Exporting OU_LM.LEGACY_ADHOC_ADJ_HEADER...
BEGIN
    CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
        pSchemaName    => 'OU_LM',
        pTableName     => 'LEGACY_ADHOC_ADJ_HEADER',
        pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
        pBucketArea    => 'ARCHIVE',
        pFolderName    => 'ARCHIVE/LM/LM_ADHOC_ADJUSTMENTS_HEADER'
    );
    DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_ADHOC_ADJ_HEADER exported');
EXCEPTION
    WHEN OTHERS THEN
        DBMS_OUTPUT.PUT_LINE('ERROR exporting ADHOC_ADJ_HEADER: ' || SQLERRM);
        RAISE;
END;
/

-- Table 2: LEGACY_ADHOC_ADJ_ITEM (~102 records)
PROMPT
PROMPT Exporting OU_LM.LEGACY_ADHOC_ADJ_ITEM...
BEGIN
    CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
        pSchemaName    => 'OU_LM',
        pTableName     => 'LEGACY_ADHOC_ADJ_ITEM',
        pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
        pBucketArea    => 'ARCHIVE',
        pFolderName    => 'ARCHIVE/LM/LM_ADHOC_ADJUSTMENTS_ITEM'
    );
    DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_ADHOC_ADJ_ITEM exported');
EXCEPTION
    WHEN OTHERS THEN
        DBMS_OUTPUT.PUT_LINE('ERROR exporting ADHOC_ADJ_ITEM: ' || SQLERRM);
        RAISE;
END;
/

-- Table 3: LEGACY_ADHOC_ADJ_ITEM_HEADER (~102 records)
PROMPT
PROMPT Exporting OU_LM.LEGACY_ADHOC_ADJ_ITEM_HEADER...
BEGIN
    CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
        pSchemaName    => 'OU_LM',
        pTableName     => 'LEGACY_ADHOC_ADJ_ITEM_HEADER',
        pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
        pBucketArea    => 'ARCHIVE',
        pFolderName    => 'ARCHIVE/LM/LM_ADHOC_ADJUSTMENTS_ITEM_HEADER'
    );
    DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_ADHOC_ADJ_ITEM_HEADER exported');
EXCEPTION
    WHEN OTHERS THEN
        DBMS_OUTPUT.PUT_LINE('ERROR exporting ADHOC_ADJ_ITEM_HEADER: ' || SQLERRM);
        RAISE;
END;
/

PROMPT
PROMPT Step 01 completed: All ADHOC_ADJ tables exported
PROMPT
|
||||
@@ -0,0 +1,66 @@
|
||||
-- ============================================================================
-- MARS-826 Export Step 02: BALANCESHEET Tables
-- ============================================================================
-- Purpose: Export BALANCESHEET table group to HIST bucket
-- Author: Grzegorz Michalski
-- Tables: 2 tables (~7.6M records total - LARGE DATASET)
-- Target: mrds_hist_dev/ARCHIVE/LM/LM_BALANCESHEET_*
--
-- Unlike the small-table steps, this script brackets the exports with
-- START_TIME/END_TIME markers so the wall-clock cost of the large dataset
-- is visible in the spool log.
-- ============================================================================

SET SERVEROUTPUT ON SIZE UNLIMITED

PROMPT
PROMPT ============================================================================
PROMPT Step 02: Exporting BALANCESHEET Tables (2 tables, ~7.6M records)
PROMPT WARNING: Large dataset - this may take significant time
PROMPT ============================================================================

-- START TIMING
PROMPT
SELECT 'START_TIME: ' || TO_CHAR(SYSDATE, 'YYYY-MM-DD HH24:MI:SS') AS TIMING FROM DUAL;

-- Table 1: LEGACY_BALANCESHEET_HEADER (~81,853 records)
-- Note: this group keys on A_ETL_LOAD_SET_KEY (no _FK suffix).
PROMPT
PROMPT Exporting OU_LM.LEGACY_BALANCESHEET_HEADER...
BEGIN
    CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
        pSchemaName    => 'OU_LM',
        pTableName     => 'LEGACY_BALANCESHEET_HEADER',
        pKeyColumnName => 'A_ETL_LOAD_SET_KEY',
        pBucketArea    => 'ARCHIVE',
        pFolderName    => 'ARCHIVE/LM/LM_BALANCESHEET_HEADER'
    );
    DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_BALANCESHEET_HEADER exported');
EXCEPTION
    WHEN OTHERS THEN
        -- Log with context, then re-raise so the master run can abort.
        DBMS_OUTPUT.PUT_LINE('ERROR exporting BALANCESHEET_HEADER: ' || SQLERRM);
        RAISE;
END;
/

-- Table 2: LEGACY_BALANCESHEET_ITEM (~7,603,340 records - VERY LARGE)
PROMPT
PROMPT Exporting OU_LM.LEGACY_BALANCESHEET_ITEM (LARGE TABLE ~7.6M records)...
BEGIN
    CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
        pSchemaName    => 'OU_LM',
        pTableName     => 'LEGACY_BALANCESHEET_ITEM',
        pKeyColumnName => 'A_ETL_LOAD_SET_KEY',
        pBucketArea    => 'ARCHIVE',
        pFolderName    => 'ARCHIVE/LM/LM_BALANCESHEET_ITEM'
    );
    DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_BALANCESHEET_ITEM exported');
EXCEPTION
    WHEN OTHERS THEN
        DBMS_OUTPUT.PUT_LINE('ERROR exporting BALANCESHEET_ITEM: ' || SQLERRM);
        RAISE;
END;
/

-- END TIMING
PROMPT
SELECT 'END_TIME: ' || TO_CHAR(SYSDATE, 'YYYY-MM-DD HH24:MI:SS') AS TIMING FROM DUAL;

PROMPT
PROMPT Step 02 completed: All BALANCESHEET tables exported
PROMPT
|
||||
@@ -0,0 +1,76 @@
|
||||
-- ============================================================================
-- MARS-826 Export Step 03: CSM_ADJ Tables
-- ============================================================================
-- Purpose: Export CSM_ADJ table group to HIST bucket
-- Author: Grzegorz Michalski
-- Tables: 3 tables (~7,756 records total)
-- Target: mrds_hist_dev/ARCHIVE/LM/LM_CSM_ADJUSTMENTS_*
--
-- Per-table anonymous blocks: log the failing table name, then re-RAISE so
-- the calling master script can stop the deployment.
-- ============================================================================

SET SERVEROUTPUT ON SIZE UNLIMITED

PROMPT
PROMPT ============================================================================
PROMPT Step 03: Exporting CSM_ADJ Tables (3 tables)
PROMPT ============================================================================

-- Table 1: LEGACY_CSM_ADJ_HEADER (~186 records)
PROMPT
PROMPT Exporting OU_LM.LEGACY_CSM_ADJ_HEADER...
BEGIN
    CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
        pSchemaName    => 'OU_LM',
        pTableName     => 'LEGACY_CSM_ADJ_HEADER',
        pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
        pBucketArea    => 'ARCHIVE',
        pFolderName    => 'ARCHIVE/LM/LM_CSM_ADJUSTMENTS_HEADER'
    );
    DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_CSM_ADJ_HEADER exported');
EXCEPTION
    WHEN OTHERS THEN
        DBMS_OUTPUT.PUT_LINE('ERROR exporting CSM_ADJ_HEADER: ' || SQLERRM);
        RAISE;
END;
/

-- Table 2: LEGACY_CSM_ADJ_ITEM (~3,785 records)
PROMPT
PROMPT Exporting OU_LM.LEGACY_CSM_ADJ_ITEM...
BEGIN
    CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
        pSchemaName    => 'OU_LM',
        pTableName     => 'LEGACY_CSM_ADJ_ITEM',
        pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
        pBucketArea    => 'ARCHIVE',
        pFolderName    => 'ARCHIVE/LM/LM_CSM_ADJUSTMENTS_ITEM'
    );
    DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_CSM_ADJ_ITEM exported');
EXCEPTION
    WHEN OTHERS THEN
        DBMS_OUTPUT.PUT_LINE('ERROR exporting CSM_ADJ_ITEM: ' || SQLERRM);
        RAISE;
END;
/

-- Table 3: LEGACY_CSM_ADJ_ITEM_HEADER (~3,785 records)
PROMPT
PROMPT Exporting OU_LM.LEGACY_CSM_ADJ_ITEM_HEADER...
BEGIN
    CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
        pSchemaName    => 'OU_LM',
        pTableName     => 'LEGACY_CSM_ADJ_ITEM_HEADER',
        pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
        pBucketArea    => 'ARCHIVE',
        pFolderName    => 'ARCHIVE/LM/LM_CSM_ADJUSTMENTS_ITEM_HEADER'
    );
    DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_CSM_ADJ_ITEM_HEADER exported');
EXCEPTION
    WHEN OTHERS THEN
        DBMS_OUTPUT.PUT_LINE('ERROR exporting CSM_ADJ_ITEM_HEADER: ' || SQLERRM);
        RAISE;
END;
/

PROMPT
PROMPT Step 03 completed: All CSM_ADJ tables exported
PROMPT
|
||||
@@ -0,0 +1,66 @@
|
||||
-- ============================================================================
-- MARS-826 Export Step 04: STANDING_FACILITY Tables
-- ============================================================================
-- Purpose: Export STANDING_FACILITY table group to HIST bucket
-- Author: Grzegorz Michalski
-- Tables: 2 tables (~1.2M records total - LARGE DATASET)
-- Target: mrds_hist_dev/ARCHIVE/LM/LM_STANDING_FACILITIES*
--
-- Large dataset, so the exports are bracketed with START_TIME/END_TIME
-- markers for the spool log (same pattern as steps 02 and 06).
-- NOTE(review): this group keys on A_ETL_LOAD_SET_FK (no KEY) — confirm
-- against the table definitions before changing.
-- ============================================================================

SET SERVEROUTPUT ON SIZE UNLIMITED

PROMPT
PROMPT ============================================================================
PROMPT Step 04: Exporting STANDING_FACILITY Tables (2 tables, ~1.2M records)
PROMPT WARNING: Large dataset - this may take significant time
PROMPT ============================================================================

-- START TIMING
PROMPT
SELECT 'START_TIME: ' || TO_CHAR(SYSDATE, 'YYYY-MM-DD HH24:MI:SS') AS TIMING FROM DUAL;

-- Table 1: LEGACY_STANDING_FACILITY (~1,205,002 records - VERY LARGE)
PROMPT
PROMPT Exporting OU_LM.LEGACY_STANDING_FACILITY (LARGE TABLE ~1.2M records)...
BEGIN
    CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
        pSchemaName    => 'OU_LM',
        pTableName     => 'LEGACY_STANDING_FACILITY',
        pKeyColumnName => 'A_ETL_LOAD_SET_FK',
        pBucketArea    => 'ARCHIVE',
        pFolderName    => 'ARCHIVE/LM/LM_STANDING_FACILITIES'
    );
    DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_STANDING_FACILITY exported');
EXCEPTION
    WHEN OTHERS THEN
        -- Log with context, then re-raise so the master run can abort.
        DBMS_OUTPUT.PUT_LINE('ERROR exporting STANDING_FACILITY: ' || SQLERRM);
        RAISE;
END;
/

-- Table 2: LEGACY_STANDING_FACILITY_HEADER (~2,647 records)
PROMPT
PROMPT Exporting OU_LM.LEGACY_STANDING_FACILITY_HEADER...
BEGIN
    CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
        pSchemaName    => 'OU_LM',
        pTableName     => 'LEGACY_STANDING_FACILITY_HEADER',
        pKeyColumnName => 'A_ETL_LOAD_SET_FK',
        pBucketArea    => 'ARCHIVE',
        pFolderName    => 'ARCHIVE/LM/LM_STANDING_FACILITIES_HEADER'
    );
    DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_STANDING_FACILITY_HEADER exported');
EXCEPTION
    WHEN OTHERS THEN
        DBMS_OUTPUT.PUT_LINE('ERROR exporting STANDING_FACILITY_HEADER: ' || SQLERRM);
        RAISE;
END;
/

-- END TIMING
PROMPT
SELECT 'END_TIME: ' || TO_CHAR(SYSDATE, 'YYYY-MM-DD HH24:MI:SS') AS TIMING FROM DUAL;

PROMPT
PROMPT Step 04 completed: All STANDING_FACILITY tables exported
PROMPT
|
||||
@@ -0,0 +1,58 @@
|
||||
-- ============================================================================
-- MARS-826 Export Step 05: MRR_IND_CURR_ACC Tables
-- ============================================================================
-- Purpose: Export MRR_IND_CURR_ACC table group to HIST bucket
-- Author: Grzegorz Michalski
-- Schema: OU_MRR (different from other groups)
-- Tables: 2 tables
-- Target: mrds_hist_dev/ARCHIVE/LM/LM_CURRENT_ACCOUNTS_*
--
-- NOTE: this is the only step that reads from OU_MRR; every other step
-- exports from OU_LM.
-- ============================================================================

SET SERVEROUTPUT ON SIZE UNLIMITED

PROMPT
PROMPT ============================================================================
PROMPT Step 05: Exporting MRR_IND_CURR_ACC Tables (2 tables from OU_MRR schema)
PROMPT ============================================================================

-- Table 1: LEGACY_MRR_IND_CURRENT_ACCOUNT_HEADER
PROMPT
PROMPT Exporting OU_MRR.LEGACY_MRR_IND_CURRENT_ACCOUNT_HEADER...
BEGIN
    CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
        pSchemaName    => 'OU_MRR',
        pTableName     => 'LEGACY_MRR_IND_CURRENT_ACCOUNT_HEADER',
        pKeyColumnName => 'A_ETL_LOAD_SET_KEY',
        pBucketArea    => 'ARCHIVE',
        pFolderName    => 'ARCHIVE/LM/LM_CURRENT_ACCOUNTS_HEADER'
    );
    DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_MRR_IND_CURRENT_ACCOUNT_HEADER exported');
EXCEPTION
    WHEN OTHERS THEN
        -- Log with context, then re-raise so the master run can abort.
        DBMS_OUTPUT.PUT_LINE('ERROR exporting MRR_IND_CURRENT_ACCOUNT_HEADER: ' || SQLERRM);
        RAISE;
END;
/

-- Table 2: LEGACY_MRR_IND_CURRENT_ACCOUNT_ITEM
PROMPT
PROMPT Exporting OU_MRR.LEGACY_MRR_IND_CURRENT_ACCOUNT_ITEM...
BEGIN
    CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
        pSchemaName    => 'OU_MRR',
        pTableName     => 'LEGACY_MRR_IND_CURRENT_ACCOUNT_ITEM',
        pKeyColumnName => 'A_ETL_LOAD_SET_KEY',
        pBucketArea    => 'ARCHIVE',
        pFolderName    => 'ARCHIVE/LM/LM_CURRENT_ACCOUNTS_ITEM'
    );
    DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_MRR_IND_CURRENT_ACCOUNT_ITEM exported');
EXCEPTION
    WHEN OTHERS THEN
        DBMS_OUTPUT.PUT_LINE('ERROR exporting MRR_IND_CURRENT_ACCOUNT_ITEM: ' || SQLERRM);
        RAISE;
END;
/

PROMPT
PROMPT Step 05 completed: All MRR_IND_CURRENT_ACCOUNT tables exported
PROMPT
|
||||
@@ -0,0 +1,66 @@
|
||||
-- ============================================================================
-- MARS-826 Export Step 06: FORECAST Tables
-- ============================================================================
-- Purpose: Export FORECAST table group to HIST bucket
-- Author: Grzegorz Michalski
-- Tables: 2 tables (~21.6M records total - VERY LARGE DATASET)
-- Target: mrds_hist_dev/ARCHIVE/LM/LM_FORECAST_*
--
-- Largest dataset of the run, so the exports are bracketed with
-- START_TIME/END_TIME markers for the spool log (same pattern as 02/04).
-- ============================================================================

SET SERVEROUTPUT ON SIZE UNLIMITED

PROMPT
PROMPT ============================================================================
PROMPT Step 06: Exporting FORECAST Tables (2 tables, ~21.6M records)
PROMPT WARNING: LARGEST DATASET - this may take significant time
PROMPT ============================================================================

-- START TIMING
PROMPT
SELECT 'START_TIME: ' || TO_CHAR(SYSDATE, 'YYYY-MM-DD HH24:MI:SS') AS TIMING FROM DUAL;

-- Table 1: LEGACY_FORECAST_HEADER (~42,504 records)
-- Note: this group keys on A_ETL_LOAD_SET_FK.
PROMPT
PROMPT Exporting OU_LM.LEGACY_FORECAST_HEADER...
BEGIN
    CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
        pSchemaName    => 'OU_LM',
        pTableName     => 'LEGACY_FORECAST_HEADER',
        pKeyColumnName => 'A_ETL_LOAD_SET_FK',
        pBucketArea    => 'ARCHIVE',
        pFolderName    => 'ARCHIVE/LM/LM_FORECAST_HEADER'
    );
    DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_FORECAST_HEADER exported');
EXCEPTION
    WHEN OTHERS THEN
        -- Log with context, then re-raise so the master run can abort.
        DBMS_OUTPUT.PUT_LINE('ERROR exporting FORECAST_HEADER: ' || SQLERRM);
        RAISE;
END;
/

-- Table 2: LEGACY_FORECAST_ITEM (~21,643,855 records - LARGEST TABLE)
PROMPT
PROMPT Exporting OU_LM.LEGACY_FORECAST_ITEM (LARGEST TABLE ~21.6M records)...
BEGIN
    CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
        pSchemaName    => 'OU_LM',
        pTableName     => 'LEGACY_FORECAST_ITEM',
        pKeyColumnName => 'A_ETL_LOAD_SET_FK',
        pBucketArea    => 'ARCHIVE',
        pFolderName    => 'ARCHIVE/LM/LM_FORECAST_ITEM'
    );
    DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_FORECAST_ITEM exported');
EXCEPTION
    WHEN OTHERS THEN
        DBMS_OUTPUT.PUT_LINE('ERROR exporting FORECAST_ITEM: ' || SQLERRM);
        RAISE;
END;
/

-- END TIMING
PROMPT
SELECT 'END_TIME: ' || TO_CHAR(SYSDATE, 'YYYY-MM-DD HH24:MI:SS') AS TIMING FROM DUAL;

PROMPT
PROMPT Step 06 completed: All FORECAST tables exported
PROMPT
|
||||
@@ -0,0 +1,76 @@
|
||||
-- ============================================================================
|
||||
-- MARS-826 Export Step 07: QR_ADJ Tables
|
||||
-- ============================================================================
|
||||
-- Purpose: Export QR_ADJ table group to HIST bucket
|
||||
-- Author: Grzegorz Michalski
|
||||
-- Tables: 3 tables (~62,573 records total)
|
||||
-- Target: mrds_hist_dev/ARCHIVE/LM/LM_QRE_ADJUSTMENTS_*
|
||||
-- ============================================================================
|
||||
|
||||
SET SERVEROUTPUT ON SIZE UNLIMITED
|
||||
|
||||
PROMPT
|
||||
PROMPT ============================================================================
|
||||
PROMPT Step 07: Exporting QR_ADJ Tables (3 tables)
|
||||
PROMPT ============================================================================
|
||||
|
||||
-- Table 1: LEGACY_QR_ADJ_HEADER (~123 records)
|
||||
PROMPT
|
||||
PROMPT Exporting OU_LM.LEGACY_QR_ADJ_HEADER...
|
||||
BEGIN
    -- Export LEGACY_QR_ADJ_HEADER, sliced by the load-set key column,
    -- to Parquet files under the ARCHIVE bucket area.
    CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
        pSchemaName    => 'OU_LM',
        pTableName     => 'LEGACY_QR_ADJ_HEADER',
        pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
        pBucketArea    => 'ARCHIVE',
        pFolderName    => 'ARCHIVE/LM/LM_QRE_ADJUSTMENTS_HEADER'
    );
    DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_QR_ADJ_HEADER exported');
EXCEPTION
    WHEN OTHERS THEN
        -- Report the Oracle error, then re-raise so the deployment run halts here.
        DBMS_OUTPUT.PUT_LINE('ERROR exporting QR_ADJ_HEADER: ' || SQLERRM);
        RAISE;
END;
/
|
||||
|
||||
-- Table 2: LEGACY_QR_ADJ_ITEM (~59,952 records)
|
||||
PROMPT
|
||||
PROMPT Exporting OU_LM.LEGACY_QR_ADJ_ITEM...
|
||||
BEGIN
    -- Export LEGACY_QR_ADJ_ITEM, sliced by the load-set key column,
    -- to Parquet files under the ARCHIVE bucket area.
    CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
        pSchemaName    => 'OU_LM',
        pTableName     => 'LEGACY_QR_ADJ_ITEM',
        pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
        pBucketArea    => 'ARCHIVE',
        pFolderName    => 'ARCHIVE/LM/LM_QRE_ADJUSTMENTS_ITEM'
    );
    DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_QR_ADJ_ITEM exported');
EXCEPTION
    WHEN OTHERS THEN
        -- Report the Oracle error, then re-raise so the deployment run halts here.
        DBMS_OUTPUT.PUT_LINE('ERROR exporting QR_ADJ_ITEM: ' || SQLERRM);
        RAISE;
END;
/
|
||||
|
||||
-- Table 3: LEGACY_QR_ADJ_ITEM_HEADER (~2,498 records)
|
||||
PROMPT
|
||||
PROMPT Exporting OU_LM.LEGACY_QR_ADJ_ITEM_HEADER...
|
||||
BEGIN
    -- Export LEGACY_QR_ADJ_ITEM_HEADER, sliced by the load-set key column,
    -- to Parquet files under the ARCHIVE bucket area.
    CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
        pSchemaName    => 'OU_LM',
        pTableName     => 'LEGACY_QR_ADJ_ITEM_HEADER',
        pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
        pBucketArea    => 'ARCHIVE',
        pFolderName    => 'ARCHIVE/LM/LM_QRE_ADJUSTMENTS_ITEM_HEADER'
    );
    DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_QR_ADJ_ITEM_HEADER exported');
EXCEPTION
    WHEN OTHERS THEN
        -- Report the Oracle error, then re-raise so the deployment run halts here.
        DBMS_OUTPUT.PUT_LINE('ERROR exporting QR_ADJ_ITEM_HEADER: ' || SQLERRM);
        RAISE;
END;
/
|
||||
|
||||
PROMPT
|
||||
PROMPT Step 07 completed: All QR_ADJ tables exported
|
||||
PROMPT
|
||||
@@ -0,0 +1,57 @@
|
||||
-- ============================================================================
|
||||
-- MARS-826 Export Step 08: TTS Tables
|
||||
-- ============================================================================
|
||||
-- Purpose: Export TTS table group to HIST bucket
|
||||
-- Author: Grzegorz Michalski
|
||||
-- Tables: 2 tables (~1,120 records total)
|
||||
-- Target: mrds_hist_dev/ARCHIVE/LM/LM_TTS_*
|
||||
-- ============================================================================
|
||||
|
||||
SET SERVEROUTPUT ON SIZE UNLIMITED
|
||||
|
||||
PROMPT
|
||||
PROMPT ============================================================================
|
||||
PROMPT Step 08: Exporting TTS Tables (2 tables)
|
||||
PROMPT ============================================================================
|
||||
|
||||
-- Table 1: LEGACY_TTS_HEADER (~560 records)
|
||||
PROMPT
|
||||
PROMPT Exporting OU_LM.LEGACY_TTS_HEADER...
|
||||
BEGIN
    -- Export LEGACY_TTS_HEADER, sliced by the load-set key column,
    -- to Parquet files under the ARCHIVE bucket area.
    CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
        pSchemaName    => 'OU_LM',
        pTableName     => 'LEGACY_TTS_HEADER',
        pKeyColumnName => 'A_ETL_LOAD_SET_FK',
        pBucketArea    => 'ARCHIVE',
        pFolderName    => 'ARCHIVE/LM/LM_TTS_HEADER'
    );
    DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_TTS_HEADER exported');
EXCEPTION
    WHEN OTHERS THEN
        -- Report the Oracle error, then re-raise so the deployment run halts here.
        DBMS_OUTPUT.PUT_LINE('ERROR exporting TTS_HEADER: ' || SQLERRM);
        RAISE;
END;
/
|
||||
|
||||
-- Table 2: LEGACY_TTS_ITEM (~560 records)
|
||||
PROMPT
|
||||
PROMPT Exporting OU_LM.LEGACY_TTS_ITEM...
|
||||
BEGIN
    -- Export LEGACY_TTS_ITEM, sliced by the load-set key column,
    -- to Parquet files under the ARCHIVE bucket area.
    CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
        pSchemaName    => 'OU_LM',
        pTableName     => 'LEGACY_TTS_ITEM',
        pKeyColumnName => 'A_ETL_LOAD_SET_FK',
        pBucketArea    => 'ARCHIVE',
        pFolderName    => 'ARCHIVE/LM/LM_TTS_ITEM'
    );
    DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_TTS_ITEM exported');
EXCEPTION
    WHEN OTHERS THEN
        -- Report the Oracle error, then re-raise so the deployment run halts here.
        DBMS_OUTPUT.PUT_LINE('ERROR exporting TTS_ITEM: ' || SQLERRM);
        RAISE;
END;
/
|
||||
|
||||
PROMPT
|
||||
PROMPT Step 08 completed: All TTS tables exported
|
||||
PROMPT
|
||||
@@ -0,0 +1,138 @@
|
||||
-- =====================================================================================
|
||||
-- Script: 09_MARS_826_verify_exports.sql
|
||||
-- Purpose: Verify all exported Parquet files exist in HIST bucket
|
||||
-- Author: Grzegorz Michalski
|
||||
-- Created: 2025-12-02
|
||||
-- MARS Issue: MARS-826
|
||||
-- Target Location: mrds_hist_dev/ARCHIVE/LM/
|
||||
-- =====================================================================================
|
||||
|
||||
SET SERVEROUTPUT ON SIZE UNLIMITED;
|
||||
SET FEEDBACK ON;
|
||||
SET VERIFY OFF;
|
||||
SET LINESIZE 200;
|
||||
|
||||
PROMPT =====================================================================================
|
||||
PROMPT MARS-826 Verification: Checking exported files in HIST bucket
|
||||
PROMPT =====================================================================================
|
||||
|
||||
--------------------------------------------------------------------------------
-- Verification block: for each of the 19 export folders in the ARCHIVE bucket,
-- count the Parquet files and sum their sizes via DBMS_CLOUD.LIST_OBJECTS,
-- print one status line per table plus an overall summary, and raise
-- ORA-20001 when no export files exist at all.
--------------------------------------------------------------------------------
DECLARE
    vBucketUri      VARCHAR2(500);   -- base URI of the ARCHIVE bucket
    vCredentialName VARCHAR2(100);   -- credential name used for DBMS_CLOUD calls
    vFileCount      NUMBER := 0;     -- Parquet files found in the current folder
    vTotalFiles     NUMBER := 0;     -- running file total across all folders
    vTotalSize      NUMBER := 0;     -- running byte total across all folders
    vFolderSize     NUMBER := 0;     -- bytes found in the current folder

    TYPE t_folder_list IS TABLE OF VARCHAR2(200);
    vFolders t_folder_list;          -- bucket folder per exported table

    TYPE t_table_list IS TABLE OF VARCHAR2(100);
    vTables t_table_list;            -- display names, index-aligned with vFolders
BEGIN
    -- Get bucket URI and credential from FILE_MANAGER configuration
    vBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
    vCredentialName := CT_MRDS.ENV_MANAGER.gvCredentialName;

    DBMS_OUTPUT.PUT_LINE('VERIFICATION TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
    DBMS_OUTPUT.PUT_LINE('');

    -- Initialize folder and table lists for all 19 tables.
    -- NOTE: vFolders(i) must describe the same table as vTables(i).
    vFolders := t_folder_list(
        'ARCHIVE/LM/LM_ADHOC_ADJUSTMENTS_HEADER/',
        'ARCHIVE/LM/LM_ADHOC_ADJUSTMENTS_ITEM/',
        'ARCHIVE/LM/LM_ADHOC_ADJUSTMENTS_ITEM_HEADER/',
        'ARCHIVE/LM/LM_BALANCESHEET_HEADER/',
        'ARCHIVE/LM/LM_BALANCESHEET_ITEM/',
        'ARCHIVE/LM/LM_CSM_ADJUSTMENTS_HEADER/',
        'ARCHIVE/LM/LM_CSM_ADJUSTMENTS_ITEM/',
        'ARCHIVE/LM/LM_CSM_ADJUSTMENTS_ITEM_HEADER/',
        'ARCHIVE/LM/LM_STANDING_FACILITIES/',
        'ARCHIVE/LM/LM_STANDING_FACILITIES_HEADER/',
        'ARCHIVE/LM/LM_CURRENT_ACCOUNTS_HEADER/',
        'ARCHIVE/LM/LM_CURRENT_ACCOUNTS_ITEM/',
        'ARCHIVE/LM/LM_FORECAST_HEADER/',
        'ARCHIVE/LM/LM_FORECAST_ITEM/',
        'ARCHIVE/LM/LM_QRE_ADJUSTMENTS_HEADER/',
        'ARCHIVE/LM/LM_QRE_ADJUSTMENTS_ITEM/',
        'ARCHIVE/LM/LM_QRE_ADJUSTMENTS_ITEM_HEADER/',
        'ARCHIVE/LM/LM_TTS_HEADER/',
        'ARCHIVE/LM/LM_TTS_ITEM/'
    );

    vTables := t_table_list(
        'ADHOC_ADJ_HEADER',
        'ADHOC_ADJ_ITEM',
        'ADHOC_ADJ_ITEM_HEADER',
        'BALANCESHEET_HEADER',
        'BALANCESHEET_ITEM',
        'CSM_ADJ_HEADER',
        'CSM_ADJ_ITEM',
        'CSM_ADJ_ITEM_HEADER',
        'STANDING_FACILITY',
        'STANDING_FACILITY_HEADER',
        'MRR_IND_CURRENT_ACCOUNT_HEADER',
        'MRR_IND_CURRENT_ACCOUNT_ITEM',
        'FORECAST_HEADER',
        'FORECAST_ITEM',
        'QR_ADJ_HEADER',
        'QR_ADJ_ITEM',
        'QR_ADJ_ITEM_HEADER',
        'TTS_HEADER',
        'TTS_ITEM'
    );

    -- Check each folder
    FOR i IN 1..vFolders.COUNT LOOP
        vFileCount := 0;
        vFolderSize := 0;

        -- List all Parquet files in the folder and accumulate count + size.
        FOR rec IN (
            SELECT object_name, bytes
            FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
                credential_name => vCredentialName,
                location_uri => vBucketUri || vFolders(i)
            ))
            WHERE object_name LIKE '%.parquet'
            ORDER BY object_name
        ) LOOP
            vFileCount := vFileCount + 1;
            vFolderSize := vFolderSize + rec.bytes;
        END LOOP;

        -- One status line per table: [OK] with counts, or [MISSING] when empty.
        IF vFileCount > 0 THEN
            DBMS_OUTPUT.PUT_LINE('[OK] ' || RPAD(vTables(i), 35) || ' : ' ||
                LPAD(vFileCount, 5) || ' files, ' ||
                LPAD(ROUND(vFolderSize/1024/1024, 2), 10) || ' MB');
            vTotalFiles := vTotalFiles + vFileCount;
            vTotalSize := vTotalSize + vFolderSize;
        ELSE
            DBMS_OUTPUT.PUT_LINE('[MISSING] ' || vTables(i) || ' - NO FILES FOUND!');
        END IF;
    END LOOP;

    DBMS_OUTPUT.PUT_LINE('');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    DBMS_OUTPUT.PUT_LINE('VERIFICATION SUMMARY:');
    DBMS_OUTPUT.PUT_LINE('  Total tables exported: ' || vTables.COUNT);
    DBMS_OUTPUT.PUT_LINE('  Total files created:   ' || vTotalFiles);
    DBMS_OUTPUT.PUT_LINE('  Total export size:     ' || ROUND(vTotalSize/1024/1024/1024, 2) || ' GB');

    -- Hard failure only when NOTHING was exported; individual missing tables
    -- are reported above but do not raise.
    IF vTotalFiles = 0 THEN
        DBMS_OUTPUT.PUT_LINE('');
        DBMS_OUTPUT.PUT_LINE('ERROR: No export files found! Export may have failed.');
        RAISE_APPLICATION_ERROR(-20001, 'Export verification failed - no files found');
    ELSE
        DBMS_OUTPUT.PUT_LINE('');
        DBMS_OUTPUT.PUT_LINE('STATUS: Export verification SUCCESSFUL');
    END IF;
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');

EXCEPTION
    WHEN OTHERS THEN
        -- Surface the error and re-raise so the calling deployment fails loudly.
        DBMS_OUTPUT.PUT_LINE('');
        DBMS_OUTPUT.PUT_LINE('VERIFICATION ERROR: ' || SQLERRM);
        RAISE;
END;
/
|
||||
@@ -0,0 +1,144 @@
|
||||
-- =====================================================================================
|
||||
-- Script: 10_MARS_826_verify_record_counts.sql
|
||||
-- Purpose: Compare source table record counts with archive table counts in ODS
|
||||
-- Author: Grzegorz Michalski
|
||||
-- Created: 2025-12-02
|
||||
-- Updated: 2025-12-15
|
||||
-- MARS Issue: MARS-826
|
||||
-- Note: Validates export process by comparing source vs archive record counts
|
||||
-- =====================================================================================
|
||||
|
||||
SET SERVEROUTPUT ON SIZE UNLIMITED;
|
||||
SET FEEDBACK ON;
|
||||
SET VERIFY OFF;
|
||||
SET LINESIZE 200;
|
||||
|
||||
PROMPT =====================================================================================
|
||||
PROMPT MARS-826 Record Count Verification: Source vs Archive Tables
|
||||
PROMPT =====================================================================================
|
||||
|
||||
--------------------------------------------------------------------------------
-- Record-count verification: for each exported table, compare COUNT(*) of the
-- source table (OU_LM / OU_MRR) against the corresponding archive table in the
-- ODS schema, print a per-table status line, then an overall summary + legend.
-- A source-side count failure is reported as SOURCE_ERROR; an archive-side
-- failure as ARCH_ERROR (sentinel count -1). Nothing is raised for mismatches;
-- the operator reviews the printed report.
-- Fix vs. previous revision: removed declared-but-unused locals
-- (vTableName, vSchemaName, vArchiveTableName).
--------------------------------------------------------------------------------
DECLARE
    vSourceCount         NUMBER;          -- COUNT(*) of the current source table
    vArchiveCount        NUMBER;          -- COUNT(*) of the archive table; -1 = access error
    vTotalSourceRecords  NUMBER := 0;     -- grand total of source rows
    vTotalArchiveRecords NUMBER := 0;     -- grand total of archive rows (errors excluded)

    -- One entry per exported table: where it lives, how to display it,
    -- and which ODS archive table mirrors it.
    TYPE t_table_rec IS RECORD (
        schema_name        VARCHAR2(30),
        table_name         VARCHAR2(100),
        display_name       VARCHAR2(100),
        archive_table_name VARCHAR2(100)
    );
    TYPE t_table_list IS TABLE OF t_table_rec;
    vTables t_table_list;
BEGIN
    DBMS_OUTPUT.PUT_LINE('VERIFICATION TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
    DBMS_OUTPUT.PUT_LINE('');
    DBMS_OUTPUT.PUT_LINE(RPAD('TABLE NAME', 40) || RPAD('SCHEMA', 12) || LPAD('SOURCE COUNT', 15) || LPAD('ARCHIVE COUNT', 15) || LPAD('DIFF', 10) || ' STATUS');
    DBMS_OUTPUT.PUT_LINE(RPAD('-', 40, '-') || RPAD('-', 12, '-') || LPAD('-', 15, '-') || LPAD('-', 15, '-') || LPAD('-', 10, '-') || ' ------');

    -- Initialize table list with schema, table names, and corresponding archive tables
    vTables := t_table_list(
        t_table_rec('OU_LM', 'LEGACY_ADHOC_ADJ_HEADER', 'LEGACY_ADHOC_ADJ_HEADER', 'LM_ADHOC_ADJUSTMENTS_HEADER_ARCHIVE'),
        t_table_rec('OU_LM', 'LEGACY_ADHOC_ADJ_ITEM', 'LEGACY_ADHOC_ADJ_ITEM', 'LM_ADHOC_ADJUSTMENTS_ITEM_ARCHIVE'),
        t_table_rec('OU_LM', 'LEGACY_ADHOC_ADJ_ITEM_HEADER', 'LEGACY_ADHOC_ADJ_ITEM_HEADER', 'LM_ADHOC_ADJUSTMENTS_ITEM_HEADER_ARCHIVE'),
        t_table_rec('OU_LM', 'LEGACY_BALANCESHEET_HEADER', 'LEGACY_BALANCESHEET_HEADER', 'LM_BALANCESHEET_HEADER_ARCHIVE'),
        t_table_rec('OU_LM', 'LEGACY_BALANCESHEET_ITEM', 'LEGACY_BALANCESHEET_ITEM', 'LM_BALANCESHEET_ITEM_ARCHIVE'),
        t_table_rec('OU_LM', 'LEGACY_CSM_ADJ_HEADER', 'LEGACY_CSM_ADJ_HEADER', 'LM_CSM_ADJUSTMENTS_HEADER_ARCHIVE'),
        t_table_rec('OU_LM', 'LEGACY_CSM_ADJ_ITEM', 'LEGACY_CSM_ADJ_ITEM', 'LM_CSM_ADJUSTMENTS_ITEM_ARCHIVE'),
        t_table_rec('OU_LM', 'LEGACY_CSM_ADJ_ITEM_HEADER', 'LEGACY_CSM_ADJ_ITEM_HEADER', 'LM_CSM_ADJUSTMENTS_ITEM_HEADER_ARCHIVE'),
        t_table_rec('OU_LM', 'LEGACY_STANDING_FACILITY', 'LEGACY_STANDING_FACILITY', 'LM_STANDING_FACILITIES_ARCHIVE'),
        t_table_rec('OU_LM', 'LEGACY_STANDING_FACILITY_HEADER', 'LEGACY_STANDING_FACILITY_HEADER', 'LM_STANDING_FACILITIES_HEADER_ARCHIVE'),
        t_table_rec('OU_MRR', 'LEGACY_MRR_IND_CURRENT_ACCOUNT_HEADER', 'LEGACY_MRR_IND_CURRENT_ACCOUNT_HEADER', 'LM_CURRENT_ACCOUNTS_HEADER_ARCHIVE'),
        t_table_rec('OU_MRR', 'LEGACY_MRR_IND_CURRENT_ACCOUNT_ITEM', 'LEGACY_MRR_IND_CURRENT_ACCOUNT_ITEM', 'LM_CURRENT_ACCOUNTS_ITEM_ARCHIVE'),
        t_table_rec('OU_LM', 'LEGACY_FORECAST_HEADER', 'LEGACY_FORECAST_HEADER', 'LM_FORECAST_HEADER_ARCHIVE'),
        t_table_rec('OU_LM', 'LEGACY_FORECAST_ITEM', 'LEGACY_FORECAST_ITEM', 'LM_FORECAST_ITEM_ARCHIVE'),
        t_table_rec('OU_LM', 'LEGACY_QR_ADJ_HEADER', 'LEGACY_QR_ADJ_HEADER', 'LM_QRE_ADJUSTMENTS_HEADER_ARCHIVE'),
        t_table_rec('OU_LM', 'LEGACY_QR_ADJ_ITEM', 'LEGACY_QR_ADJ_ITEM', 'LM_QRE_ADJUSTMENTS_ITEM_ARCHIVE'),
        t_table_rec('OU_LM', 'LEGACY_QR_ADJ_ITEM_HEADER', 'LEGACY_QR_ADJ_ITEM_HEADER', 'LM_QRE_ADJUSTMENTS_ITEM_HEADER_ARCHIVE'),
        t_table_rec('OU_LM', 'LEGACY_TTS_HEADER', 'LEGACY_TTS_HEADER', 'LM_TTS_HEADER_ARCHIVE'),
        t_table_rec('OU_LM', 'LEGACY_TTS_ITEM', 'LEGACY_TTS_ITEM', 'LM_TTS_ITEM_ARCHIVE')
    );

    -- Query each table pair and print the comparison line.
    FOR i IN 1..vTables.COUNT LOOP
        BEGIN
            -- Get source table count (dynamic SQL: table names come from the list).
            EXECUTE IMMEDIATE 'SELECT COUNT(*) FROM ' || vTables(i).schema_name || '.' || vTables(i).table_name
                INTO vSourceCount;

            -- Get archive table count; -1 marks an archive-side access error
            -- (missing table, no grant, ...) without aborting the whole report.
            BEGIN
                EXECUTE IMMEDIATE 'SELECT COUNT(*) FROM ODS.' || vTables(i).archive_table_name
                    INTO vArchiveCount;
            EXCEPTION
                WHEN OTHERS THEN
                    vArchiveCount := -1; -- Mark as archive error
            END;

            -- Display comparison
            DBMS_OUTPUT.PUT_LINE(
                RPAD(vTables(i).display_name, 40) ||
                RPAD(vTables(i).schema_name, 12) ||
                LPAD(TO_CHAR(vSourceCount), 15) ||
                LPAD(CASE WHEN vArchiveCount = -1 THEN 'ERROR' ELSE TO_CHAR(vArchiveCount) END, 15) ||
                LPAD(CASE WHEN vArchiveCount = -1 THEN 'N/A' ELSE TO_CHAR(vSourceCount - vArchiveCount) END, 10) ||
                ' ' || CASE
                    WHEN vArchiveCount = -1 THEN 'ARCH_ERROR'
                    WHEN vSourceCount = vArchiveCount THEN 'MATCH'
                    WHEN vSourceCount > vArchiveCount THEN 'SOURCE_HIGHER'
                    ELSE 'ARCHIVE_HIGHER'
                END
            );

            vTotalSourceRecords := vTotalSourceRecords + vSourceCount;
            -- Exclude the -1 error sentinel from the archive total
            -- (a genuine zero count would contribute nothing anyway).
            IF vArchiveCount > 0 THEN
                vTotalArchiveRecords := vTotalArchiveRecords + vArchiveCount;
            END IF;

        EXCEPTION
            WHEN OTHERS THEN
                -- Source-side count failed: print an error row and continue
                -- with the remaining tables.
                DBMS_OUTPUT.PUT_LINE(
                    RPAD(vTables(i).display_name, 40) ||
                    RPAD(vTables(i).schema_name, 12) ||
                    LPAD('ERROR', 15) ||
                    LPAD('ERROR', 15) ||
                    LPAD('N/A', 10) ||
                    ' SOURCE_ERROR'
                );
        END;
    END LOOP;

    DBMS_OUTPUT.PUT_LINE('');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    DBMS_OUTPUT.PUT_LINE('RECORD COUNT SUMMARY:');
    DBMS_OUTPUT.PUT_LINE('  Total tables compared:  ' || vTables.COUNT);
    DBMS_OUTPUT.PUT_LINE('  Total source records:   ' || TO_CHAR(vTotalSourceRecords));
    DBMS_OUTPUT.PUT_LINE('  Total archive records:  ' || TO_CHAR(vTotalArchiveRecords));
    DBMS_OUTPUT.PUT_LINE('  Total difference:       ' || TO_CHAR(vTotalSourceRecords - vTotalArchiveRecords));
    DBMS_OUTPUT.PUT_LINE('');
    IF vTotalSourceRecords = vTotalArchiveRecords THEN
        DBMS_OUTPUT.PUT_LINE('VALIDATION RESULT: PASS - Source and Archive record counts match perfectly');
    ELSE
        DBMS_OUTPUT.PUT_LINE('VALIDATION RESULT: REVIEW - Source and Archive record counts differ');
        DBMS_OUTPUT.PUT_LINE('  Check individual table statuses above for detailed analysis');
    END IF;
    DBMS_OUTPUT.PUT_LINE('');
    DBMS_OUTPUT.PUT_LINE('LEGEND:');
    DBMS_OUTPUT.PUT_LINE('  MATCH          - Source and Archive counts are identical');
    DBMS_OUTPUT.PUT_LINE('  SOURCE_HIGHER  - Source has more records than Archive');
    DBMS_OUTPUT.PUT_LINE('  ARCHIVE_HIGHER - Archive has more records than Source');
    DBMS_OUTPUT.PUT_LINE('  ARCH_ERROR     - Archive table access error');
    DBMS_OUTPUT.PUT_LINE('  SOURCE_ERROR   - Source table access error');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');

EXCEPTION
    WHEN OTHERS THEN
        -- Unexpected failure outside the per-table handlers: report and re-raise.
        DBMS_OUTPUT.PUT_LINE('');
        DBMS_OUTPUT.PUT_LINE('VERIFICATION ERROR: ' || SQLERRM);
        RAISE;
END;
/
|
||||
@@ -0,0 +1,98 @@
|
||||
-- =====================================================================================
|
||||
-- Script: 91_MARS_826_rollback_ADHOC_ADJ_tables.sql
|
||||
-- Purpose: Rollback ADHOC_ADJ tables export - delete files from HIST bucket
|
||||
-- Author: Grzegorz Michalski
|
||||
-- Created: 2025-12-02
|
||||
-- MARS Issue: MARS-826
|
||||
-- Target Tables:
|
||||
-- - OU_LM.ADHOC_ADJ_HEADER (~1 records)
|
||||
-- - OU_LM.ADHOC_ADJ_ITEM (~104 records)
|
||||
-- - OU_LM.ADHOC_ADJ_ITEM_HEADER (~104 records)
|
||||
-- Target Location: mrds_hist_dev/ARCHIVE/LM/LM_ADHOC_ADJUSTMENTS_*
|
||||
-- =====================================================================================
|
||||
|
||||
SET SERVEROUTPUT ON SIZE UNLIMITED;
|
||||
SET FEEDBACK ON;
|
||||
SET VERIFY OFF;
|
||||
|
||||
PROMPT =====================================================================================
|
||||
PROMPT MARS-826 Rollback: Deleting ADHOC_ADJ table exports from HIST bucket
|
||||
PROMPT =====================================================================================
|
||||
|
||||
--------------------------------------------------------------------------------
-- Rollback block: remove every Parquet file written by the ADHOC_ADJ export
-- step from the ARCHIVE bucket, folder by folder, reporting per-folder and
-- grand totals.
--------------------------------------------------------------------------------
DECLARE
    vBaseUri     VARCHAR2(500);   -- ARCHIVE bucket base URI
    vCredential  VARCHAR2(100);   -- credential for DBMS_CLOUD calls
    vFound       NUMBER := 0;     -- Parquet files listed in the current folder
    vRemoved     NUMBER := 0;     -- files successfully deleted in the current folder
    vGrandTotal  NUMBER := 0;     -- deletions across all folders

    TYPE t_folder_list IS TABLE OF VARCHAR2(200);
    vTargets     t_folder_list;   -- folders to purge
BEGIN
    -- Resolve connection details from the FILE_MANAGER / ENV_MANAGER configuration.
    vBaseUri    := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
    vCredential := CT_MRDS.ENV_MANAGER.gvCredentialName;

    DBMS_OUTPUT.PUT_LINE('START TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
    DBMS_OUTPUT.PUT_LINE('');

    -- Folders populated by the ADHOC_ADJ export step.
    vTargets := t_folder_list(
        'ARCHIVE/LM/LM_ADHOC_ADJUSTMENTS_HEADER/',
        'ARCHIVE/LM/LM_ADHOC_ADJUSTMENTS_ITEM/',
        'ARCHIVE/LM/LM_ADHOC_ADJUSTMENTS_ITEM_HEADER/'
    );

    FOR idx IN 1 .. vTargets.COUNT LOOP
        DBMS_OUTPUT.PUT_LINE('Processing folder: ' || vTargets(idx));
        vFound   := 0;
        vRemoved := 0;

        -- Enumerate the folder's Parquet objects and delete them one at a time,
        -- so a single failed delete does not abort the rest of the folder.
        FOR obj IN (
            SELECT object_name
              FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
                       credential_name => vCredential,
                       location_uri    => vBaseUri || vTargets(idx)
                   ))
             WHERE object_name LIKE '%.parquet'
             ORDER BY object_name
        ) LOOP
            vFound := vFound + 1;
            BEGIN
                DBMS_CLOUD.DELETE_OBJECT(
                    credential_name => vCredential,
                    object_uri      => vBaseUri || vTargets(idx) || obj.object_name
                );
                vRemoved := vRemoved + 1;
                DBMS_OUTPUT.PUT_LINE('  [' || vRemoved || '] Deleted: ' || obj.object_name);
            EXCEPTION
                WHEN OTHERS THEN
                    -- Best-effort: report the failure and keep deleting.
                    DBMS_OUTPUT.PUT_LINE('  ERROR deleting ' || obj.object_name || ': ' || SQLERRM);
            END;
        END LOOP;

        vGrandTotal := vGrandTotal + vRemoved;
        DBMS_OUTPUT.PUT_LINE('Folder summary: Found ' || vFound || ' files, deleted ' || vRemoved || ' files');
        DBMS_OUTPUT.PUT_LINE('');

    END LOOP;

    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    DBMS_OUTPUT.PUT_LINE('ROLLBACK COMPLETE: Total files deleted: ' || vGrandTotal);
    DBMS_OUTPUT.PUT_LINE('END TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');

EXCEPTION
    WHEN OTHERS THEN
        -- Configuration/listing failure outside the per-object handler: re-raise.
        DBMS_OUTPUT.PUT_LINE('');
        DBMS_OUTPUT.PUT_LINE('FATAL ERROR: ' || SQLERRM);
        DBMS_OUTPUT.PUT_LINE('Error occurred at: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
        RAISE;
END;
/
|
||||
@@ -0,0 +1,96 @@
|
||||
-- =====================================================================================
|
||||
-- Script: 92_MARS_826_rollback_BALANCESHEET_tables.sql
|
||||
-- Purpose: Rollback BALANCESHEET tables export - delete files from HIST bucket
|
||||
-- Author: Grzegorz Michalski
|
||||
-- Created: 2025-12-02
|
||||
-- MARS Issue: MARS-826
|
||||
-- Target Tables:
|
||||
-- - OU_LM.BALANCESHEET_HEADER (~133,816 records)
|
||||
-- - OU_LM.BALANCESHEET_ITEM (~7,653,012 records) - LARGE TABLE
|
||||
-- Target Location: mrds_hist_dev/ARCHIVE/LM/LM_BALANCESHEET_*
|
||||
-- =====================================================================================
|
||||
|
||||
SET SERVEROUTPUT ON SIZE UNLIMITED;
|
||||
SET FEEDBACK ON;
|
||||
SET VERIFY OFF;
|
||||
|
||||
PROMPT =====================================================================================
|
||||
PROMPT MARS-826 Rollback: Deleting BALANCESHEET table exports from HIST bucket
|
||||
PROMPT =====================================================================================
|
||||
|
||||
--------------------------------------------------------------------------------
-- Rollback block: remove every Parquet file written by the BALANCESHEET export
-- step from the ARCHIVE bucket, folder by folder, reporting per-folder and
-- grand totals.
--------------------------------------------------------------------------------
DECLARE
    vBaseUri     VARCHAR2(500);   -- ARCHIVE bucket base URI
    vCredential  VARCHAR2(100);   -- credential for DBMS_CLOUD calls
    vFound       NUMBER := 0;     -- Parquet files listed in the current folder
    vRemoved     NUMBER := 0;     -- files successfully deleted in the current folder
    vGrandTotal  NUMBER := 0;     -- deletions across all folders

    TYPE t_folder_list IS TABLE OF VARCHAR2(200);
    vTargets     t_folder_list;   -- folders to purge
BEGIN
    -- Resolve connection details from the FILE_MANAGER / ENV_MANAGER configuration.
    vBaseUri    := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
    vCredential := CT_MRDS.ENV_MANAGER.gvCredentialName;

    DBMS_OUTPUT.PUT_LINE('START TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
    DBMS_OUTPUT.PUT_LINE('');

    -- Folders populated by the BALANCESHEET export step (ITEM is the large one).
    vTargets := t_folder_list(
        'ARCHIVE/LM/LM_BALANCESHEET_HEADER/',
        'ARCHIVE/LM/LM_BALANCESHEET_ITEM/'
    );

    FOR idx IN 1 .. vTargets.COUNT LOOP
        DBMS_OUTPUT.PUT_LINE('Processing folder: ' || vTargets(idx));
        vFound   := 0;
        vRemoved := 0;

        -- Enumerate the folder's Parquet objects and delete them one at a time,
        -- so a single failed delete does not abort the rest of the folder.
        FOR obj IN (
            SELECT object_name
              FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
                       credential_name => vCredential,
                       location_uri    => vBaseUri || vTargets(idx)
                   ))
             WHERE object_name LIKE '%.parquet'
             ORDER BY object_name
        ) LOOP
            vFound := vFound + 1;
            BEGIN
                DBMS_CLOUD.DELETE_OBJECT(
                    credential_name => vCredential,
                    object_uri      => vBaseUri || vTargets(idx) || obj.object_name
                );
                vRemoved := vRemoved + 1;
                DBMS_OUTPUT.PUT_LINE('  [' || vRemoved || '] Deleted: ' || obj.object_name);
            EXCEPTION
                WHEN OTHERS THEN
                    -- Best-effort: report the failure and keep deleting.
                    DBMS_OUTPUT.PUT_LINE('  ERROR deleting ' || obj.object_name || ': ' || SQLERRM);
            END;
        END LOOP;

        vGrandTotal := vGrandTotal + vRemoved;
        DBMS_OUTPUT.PUT_LINE('Folder summary: Found ' || vFound || ' files, deleted ' || vRemoved || ' files');
        DBMS_OUTPUT.PUT_LINE('');

    END LOOP;

    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    DBMS_OUTPUT.PUT_LINE('ROLLBACK COMPLETE: Total files deleted: ' || vGrandTotal);
    DBMS_OUTPUT.PUT_LINE('END TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');

EXCEPTION
    WHEN OTHERS THEN
        -- Configuration/listing failure outside the per-object handler: re-raise.
        DBMS_OUTPUT.PUT_LINE('');
        DBMS_OUTPUT.PUT_LINE('FATAL ERROR: ' || SQLERRM);
        DBMS_OUTPUT.PUT_LINE('Error occurred at: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
        RAISE;
END;
/
|
||||
@@ -0,0 +1,98 @@
|
||||
-- =====================================================================================
|
||||
-- Script: 93_MARS_826_rollback_CSM_ADJ_tables.sql
|
||||
-- Purpose: Rollback CSM_ADJ tables export - delete files from HIST bucket
|
||||
-- Author: Grzegorz Michalski
|
||||
-- Created: 2025-12-02
|
||||
-- MARS Issue: MARS-826
|
||||
-- Target Tables:
|
||||
-- - OU_LM.CSM_ADJ_HEADER (~388 records)
|
||||
-- - OU_LM.CSM_ADJ_ITEM (~3,980 records)
|
||||
-- - OU_LM.CSM_ADJ_ITEM_HEADER (~3,388 records)
|
||||
-- Target Location: mrds_hist_dev/ARCHIVE/LM/LM_CSM_ADJUSTMENTS_*
|
||||
-- =====================================================================================
|
||||
|
||||
SET SERVEROUTPUT ON SIZE UNLIMITED;
|
||||
SET FEEDBACK ON;
|
||||
SET VERIFY OFF;
|
||||
|
||||
PROMPT =====================================================================================
|
||||
PROMPT MARS-826 Rollback: Deleting CSM_ADJ table exports from HIST bucket
|
||||
PROMPT =====================================================================================
|
||||
|
||||
--------------------------------------------------------------------------------
-- Rollback block: remove every Parquet file written by the CSM_ADJ export
-- step from the ARCHIVE bucket, folder by folder, reporting per-folder and
-- grand totals.
--------------------------------------------------------------------------------
DECLARE
    vBaseUri     VARCHAR2(500);   -- ARCHIVE bucket base URI
    vCredential  VARCHAR2(100);   -- credential for DBMS_CLOUD calls
    vFound       NUMBER := 0;     -- Parquet files listed in the current folder
    vRemoved     NUMBER := 0;     -- files successfully deleted in the current folder
    vGrandTotal  NUMBER := 0;     -- deletions across all folders

    TYPE t_folder_list IS TABLE OF VARCHAR2(200);
    vTargets     t_folder_list;   -- folders to purge
BEGIN
    -- Resolve connection details from the FILE_MANAGER / ENV_MANAGER configuration.
    vBaseUri    := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
    vCredential := CT_MRDS.ENV_MANAGER.gvCredentialName;

    DBMS_OUTPUT.PUT_LINE('START TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
    DBMS_OUTPUT.PUT_LINE('');

    -- Folders populated by the CSM_ADJ export step.
    vTargets := t_folder_list(
        'ARCHIVE/LM/LM_CSM_ADJUSTMENTS_HEADER/',
        'ARCHIVE/LM/LM_CSM_ADJUSTMENTS_ITEM/',
        'ARCHIVE/LM/LM_CSM_ADJUSTMENTS_ITEM_HEADER/'
    );

    FOR idx IN 1 .. vTargets.COUNT LOOP
        DBMS_OUTPUT.PUT_LINE('Processing folder: ' || vTargets(idx));
        vFound   := 0;
        vRemoved := 0;

        -- Enumerate the folder's Parquet objects and delete them one at a time,
        -- so a single failed delete does not abort the rest of the folder.
        FOR obj IN (
            SELECT object_name
              FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
                       credential_name => vCredential,
                       location_uri    => vBaseUri || vTargets(idx)
                   ))
             WHERE object_name LIKE '%.parquet'
             ORDER BY object_name
        ) LOOP
            vFound := vFound + 1;
            BEGIN
                DBMS_CLOUD.DELETE_OBJECT(
                    credential_name => vCredential,
                    object_uri      => vBaseUri || vTargets(idx) || obj.object_name
                );
                vRemoved := vRemoved + 1;
                DBMS_OUTPUT.PUT_LINE('  [' || vRemoved || '] Deleted: ' || obj.object_name);
            EXCEPTION
                WHEN OTHERS THEN
                    -- Best-effort: report the failure and keep deleting.
                    DBMS_OUTPUT.PUT_LINE('  ERROR deleting ' || obj.object_name || ': ' || SQLERRM);
            END;
        END LOOP;

        vGrandTotal := vGrandTotal + vRemoved;
        DBMS_OUTPUT.PUT_LINE('Folder summary: Found ' || vFound || ' files, deleted ' || vRemoved || ' files');
        DBMS_OUTPUT.PUT_LINE('');

    END LOOP;

    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    DBMS_OUTPUT.PUT_LINE('ROLLBACK COMPLETE: Total files deleted: ' || vGrandTotal);
    DBMS_OUTPUT.PUT_LINE('END TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');

EXCEPTION
    WHEN OTHERS THEN
        -- Configuration/listing failure outside the per-object handler: re-raise.
        DBMS_OUTPUT.PUT_LINE('');
        DBMS_OUTPUT.PUT_LINE('FATAL ERROR: ' || SQLERRM);
        DBMS_OUTPUT.PUT_LINE('Error occurred at: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
        RAISE;
END;
/
|
||||
@@ -0,0 +1,96 @@
|
||||
-- =====================================================================================
-- Script: 94_MARS_826_rollback_STANDING_FACILITY_tables.sql
-- Purpose: Rollback STANDING_FACILITY tables export - delete files from HIST bucket
-- Author: Grzegorz Michalski
-- Created: 2025-12-02
-- MARS Issue: MARS-826
-- Target Tables:
--   - OU_LM.STANDING_FACILITY        (~1,258,920 records)
--   - OU_LM.STANDING_FACILITY_HEADER (~62,946 records)
-- Target Location: mrds_hist_dev/ARCHIVE/LM/LM_STANDING_FACILITIES*
-- =====================================================================================

SET SERVEROUTPUT ON SIZE UNLIMITED;
SET FEEDBACK ON;
SET VERIFY OFF;

PROMPT =====================================================================================
PROMPT MARS-826 Rollback: Deleting STANDING_FACILITY table exports from HIST bucket
PROMPT =====================================================================================

DECLARE
    -- Object Storage access, resolved from project configuration packages.
    vBucketUri      VARCHAR2(500);
    vCredentialName VARCHAR2(100);

    -- Counters: per-folder (reset each iteration) and script-wide totals.
    vFileCount      NUMBER := 0;   -- parquet files found in the current folder
    vDeletedCount   NUMBER := 0;   -- files successfully deleted in the current folder
    vTotalDeleted   NUMBER := 0;   -- files deleted across all folders
    vTotalErrors    NUMBER := 0;   -- delete failures across all folders

    TYPE t_folder_list IS TABLE OF VARCHAR2(200);
    vFolders        t_folder_list;
BEGIN
    -- Get bucket URI and credential from FILE_MANAGER configuration
    vBucketUri      := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
    vCredentialName := CT_MRDS.ENV_MANAGER.gvCredentialName;

    DBMS_OUTPUT.PUT_LINE('START TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
    DBMS_OUTPUT.PUT_LINE('');

    -- Folders written by the STANDING_FACILITY export scripts (MARS-826 step 04).
    vFolders := t_folder_list(
        'ARCHIVE/LM/LM_STANDING_FACILITIES/',
        'ARCHIVE/LM/LM_STANDING_FACILITIES_HEADER/'
    );

    -- Process each folder
    FOR i IN 1..vFolders.COUNT LOOP
        DBMS_OUTPUT.PUT_LINE('Processing folder: ' || vFolders(i));
        vFileCount    := 0;
        vDeletedCount := 0;

        -- List and delete all Parquet files in the folder.
        -- NOTE(review): LIKE '%.parquet' is case-sensitive; assumes the exporter
        -- writes lowercase extensions - confirm against DATA_EXPORTER file naming.
        FOR rec IN (
            SELECT object_name
              FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
                       credential_name => vCredentialName,
                       location_uri    => vBucketUri || vFolders(i)
                   ))
             WHERE object_name LIKE '%.parquet'
             ORDER BY object_name
        ) LOOP
            vFileCount := vFileCount + 1;

            BEGIN
                -- Delete the Parquet file
                DBMS_CLOUD.DELETE_OBJECT(
                    credential_name => vCredentialName,
                    object_uri      => vBucketUri || vFolders(i) || rec.object_name
                );

                vDeletedCount := vDeletedCount + 1;
                DBMS_OUTPUT.PUT_LINE('  [' || vDeletedCount || '] Deleted: ' || rec.object_name);
            EXCEPTION
                WHEN OTHERS THEN
                    -- One failed delete must not abort the whole rollback, but it is
                    -- counted so the final status can report partial failure instead
                    -- of unconditionally claiming success.
                    vTotalErrors := vTotalErrors + 1;
                    DBMS_OUTPUT.PUT_LINE('  ERROR deleting ' || rec.object_name || ': ' || SQLERRM);
            END;
        END LOOP;

        vTotalDeleted := vTotalDeleted + vDeletedCount;
        DBMS_OUTPUT.PUT_LINE('Folder summary: Found ' || vFileCount || ' files, deleted ' || vDeletedCount || ' files');
        DBMS_OUTPUT.PUT_LINE('');
    END LOOP;

    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    IF vTotalErrors = 0 THEN
        DBMS_OUTPUT.PUT_LINE('ROLLBACK COMPLETE: Total files deleted: ' || vTotalDeleted);
    ELSE
        -- Fix: previously the script always printed "ROLLBACK COMPLETE" even when
        -- individual deletes had failed; surface partial failure explicitly.
        DBMS_OUTPUT.PUT_LINE('ROLLBACK INCOMPLETE: Deleted ' || vTotalDeleted ||
                             ' file(s), ' || vTotalErrors || ' delete error(s) - rerun or clean up manually');
    END IF;
    DBMS_OUTPUT.PUT_LINE('END TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');

EXCEPTION
    WHEN OTHERS THEN
        -- Fatal error outside the per-file handler (e.g. configuration lookup or
        -- listing failure): log context and re-raise so the deployment runner fails.
        DBMS_OUTPUT.PUT_LINE('');
        DBMS_OUTPUT.PUT_LINE('FATAL ERROR: ' || SQLERRM);
        DBMS_OUTPUT.PUT_LINE('Error occurred at: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
        RAISE;
END;
/
|
||||
@@ -0,0 +1,96 @@
|
||||
-- =====================================================================================
-- Script: 95_MARS_826_rollback_MRR_IND_CURR_ACC_tables.sql
-- Purpose: Rollback MRR_IND_CURR_ACC table exports by deleting Parquet files from HIST bucket
-- Author: Grzegorz Michalski
-- Created: 2025-12-02
-- MARS Issue: MARS-826
-- Target Tables:
--   - OU_MRR.MRR_IND_CURRENT_ACCOUNT_HEADER (~16,696 records)
--   - OU_MRR.MRR_IND_CURRENT_ACCOUNT_ITEM   (~350,632 records)
-- Target Location: mrds_hist_dev/ARCHIVE/LM/LM_CURRENT_ACCOUNTS_*
-- =====================================================================================

SET SERVEROUTPUT ON SIZE UNLIMITED;
SET FEEDBACK ON;
SET VERIFY OFF;

PROMPT =====================================================================================
PROMPT MARS-826 Rollback: Deleting MRR_IND_CURR_ACC table exports from HIST bucket
PROMPT =====================================================================================

DECLARE
    -- Object Storage access, resolved from project configuration packages.
    vBucketUri      VARCHAR2(500);
    vCredentialName VARCHAR2(100);

    -- Counters: per-folder (reset each iteration) and script-wide totals.
    vFileCount      NUMBER := 0;   -- parquet files found in the current folder
    vDeletedCount   NUMBER := 0;   -- files successfully deleted in the current folder
    vTotalDeleted   NUMBER := 0;   -- files deleted across all folders
    vTotalErrors    NUMBER := 0;   -- delete failures across all folders

    TYPE t_folder_list IS TABLE OF VARCHAR2(200);
    vFolders        t_folder_list;
BEGIN
    -- Get bucket URI and credential from FILE_MANAGER configuration
    vBucketUri      := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
    vCredentialName := CT_MRDS.ENV_MANAGER.gvCredentialName;

    DBMS_OUTPUT.PUT_LINE('START TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
    DBMS_OUTPUT.PUT_LINE('');

    -- Folders written by the MRR_IND_CURR_ACC export scripts (OU_MRR schema).
    vFolders := t_folder_list(
        'ARCHIVE/LM/LM_CURRENT_ACCOUNTS_HEADER/',
        'ARCHIVE/LM/LM_CURRENT_ACCOUNTS_ITEM/'
    );

    -- Process each folder
    FOR i IN 1..vFolders.COUNT LOOP
        DBMS_OUTPUT.PUT_LINE('Processing folder: ' || vFolders(i));
        vFileCount    := 0;
        vDeletedCount := 0;

        -- List and delete all Parquet files in the folder.
        -- NOTE(review): LIKE '%.parquet' is case-sensitive; assumes the exporter
        -- writes lowercase extensions - confirm against DATA_EXPORTER file naming.
        FOR rec IN (
            SELECT object_name
              FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
                       credential_name => vCredentialName,
                       location_uri    => vBucketUri || vFolders(i)
                   ))
             WHERE object_name LIKE '%.parquet'
             ORDER BY object_name
        ) LOOP
            vFileCount := vFileCount + 1;

            BEGIN
                -- Delete the Parquet file
                DBMS_CLOUD.DELETE_OBJECT(
                    credential_name => vCredentialName,
                    object_uri      => vBucketUri || vFolders(i) || rec.object_name
                );

                vDeletedCount := vDeletedCount + 1;
                DBMS_OUTPUT.PUT_LINE('  [' || vDeletedCount || '] Deleted: ' || rec.object_name);
            EXCEPTION
                WHEN OTHERS THEN
                    -- One failed delete must not abort the whole rollback, but it is
                    -- counted so the final status can report partial failure instead
                    -- of unconditionally claiming success.
                    vTotalErrors := vTotalErrors + 1;
                    DBMS_OUTPUT.PUT_LINE('  ERROR deleting ' || rec.object_name || ': ' || SQLERRM);
            END;
        END LOOP;

        vTotalDeleted := vTotalDeleted + vDeletedCount;
        DBMS_OUTPUT.PUT_LINE('Folder summary: Found ' || vFileCount || ' files, deleted ' || vDeletedCount || ' files');
        DBMS_OUTPUT.PUT_LINE('');
    END LOOP;

    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    IF vTotalErrors = 0 THEN
        DBMS_OUTPUT.PUT_LINE('ROLLBACK COMPLETE: Total files deleted: ' || vTotalDeleted);
    ELSE
        -- Fix: previously the script always printed "ROLLBACK COMPLETE" even when
        -- individual deletes had failed; surface partial failure explicitly.
        DBMS_OUTPUT.PUT_LINE('ROLLBACK INCOMPLETE: Deleted ' || vTotalDeleted ||
                             ' file(s), ' || vTotalErrors || ' delete error(s) - rerun or clean up manually');
    END IF;
    DBMS_OUTPUT.PUT_LINE('END TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');

EXCEPTION
    WHEN OTHERS THEN
        -- Fatal error outside the per-file handler (e.g. configuration lookup or
        -- listing failure): log context and re-raise so the deployment runner fails.
        DBMS_OUTPUT.PUT_LINE('');
        DBMS_OUTPUT.PUT_LINE('FATAL ERROR: ' || SQLERRM);
        DBMS_OUTPUT.PUT_LINE('Error occurred at: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
        RAISE;
END;
/
|
||||
@@ -0,0 +1,96 @@
|
||||
-- =====================================================================================
-- Script: 96_MARS_826_rollback_FORECAST_tables.sql
-- Purpose: Rollback FORECAST tables export - delete files from HIST bucket
-- Author: Grzegorz Michalski
-- Created: 2025-12-02
-- MARS Issue: MARS-826
-- Target Tables:
--   - OU_LM.FORECAST_HEADER (~72,932 records)
--   - OU_LM.FORECAST_ITEM   (~21,679,568 records) - LARGEST TABLE
-- Target Location: mrds_hist_dev/ARCHIVE/LM/LM_FORECAST_*
-- =====================================================================================

SET SERVEROUTPUT ON SIZE UNLIMITED;
SET FEEDBACK ON;
SET VERIFY OFF;

PROMPT =====================================================================================
PROMPT MARS-826 Rollback: Deleting FORECAST table exports from HIST bucket
PROMPT =====================================================================================

DECLARE
    -- Object Storage access, resolved from project configuration packages.
    vBucketUri      VARCHAR2(500);
    vCredentialName VARCHAR2(100);

    -- Counters: per-folder (reset each iteration) and script-wide totals.
    vFileCount      NUMBER := 0;   -- parquet files found in the current folder
    vDeletedCount   NUMBER := 0;   -- files successfully deleted in the current folder
    vTotalDeleted   NUMBER := 0;   -- files deleted across all folders
    vTotalErrors    NUMBER := 0;   -- delete failures across all folders

    TYPE t_folder_list IS TABLE OF VARCHAR2(200);
    vFolders        t_folder_list;
BEGIN
    -- Get bucket URI and credential from FILE_MANAGER configuration
    vBucketUri      := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
    vCredentialName := CT_MRDS.ENV_MANAGER.gvCredentialName;

    DBMS_OUTPUT.PUT_LINE('START TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
    DBMS_OUTPUT.PUT_LINE('');

    -- Folders written by the FORECAST export scripts. FORECAST_ITEM is the largest
    -- MARS-826 export, so this folder may contain many parquet chunks.
    vFolders := t_folder_list(
        'ARCHIVE/LM/LM_FORECAST_HEADER/',
        'ARCHIVE/LM/LM_FORECAST_ITEM/'
    );

    -- Process each folder
    FOR i IN 1..vFolders.COUNT LOOP
        DBMS_OUTPUT.PUT_LINE('Processing folder: ' || vFolders(i));
        vFileCount    := 0;
        vDeletedCount := 0;

        -- List and delete all Parquet files in the folder.
        -- NOTE(review): LIKE '%.parquet' is case-sensitive; assumes the exporter
        -- writes lowercase extensions - confirm against DATA_EXPORTER file naming.
        FOR rec IN (
            SELECT object_name
              FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
                       credential_name => vCredentialName,
                       location_uri    => vBucketUri || vFolders(i)
                   ))
             WHERE object_name LIKE '%.parquet'
             ORDER BY object_name
        ) LOOP
            vFileCount := vFileCount + 1;

            BEGIN
                -- Delete the Parquet file
                DBMS_CLOUD.DELETE_OBJECT(
                    credential_name => vCredentialName,
                    object_uri      => vBucketUri || vFolders(i) || rec.object_name
                );

                vDeletedCount := vDeletedCount + 1;
                DBMS_OUTPUT.PUT_LINE('  [' || vDeletedCount || '] Deleted: ' || rec.object_name);
            EXCEPTION
                WHEN OTHERS THEN
                    -- One failed delete must not abort the whole rollback, but it is
                    -- counted so the final status can report partial failure instead
                    -- of unconditionally claiming success.
                    vTotalErrors := vTotalErrors + 1;
                    DBMS_OUTPUT.PUT_LINE('  ERROR deleting ' || rec.object_name || ': ' || SQLERRM);
            END;
        END LOOP;

        vTotalDeleted := vTotalDeleted + vDeletedCount;
        DBMS_OUTPUT.PUT_LINE('Folder summary: Found ' || vFileCount || ' files, deleted ' || vDeletedCount || ' files');
        DBMS_OUTPUT.PUT_LINE('');
    END LOOP;

    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    IF vTotalErrors = 0 THEN
        DBMS_OUTPUT.PUT_LINE('ROLLBACK COMPLETE: Total files deleted: ' || vTotalDeleted);
    ELSE
        -- Fix: previously the script always printed "ROLLBACK COMPLETE" even when
        -- individual deletes had failed; surface partial failure explicitly.
        DBMS_OUTPUT.PUT_LINE('ROLLBACK INCOMPLETE: Deleted ' || vTotalDeleted ||
                             ' file(s), ' || vTotalErrors || ' delete error(s) - rerun or clean up manually');
    END IF;
    DBMS_OUTPUT.PUT_LINE('END TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');

EXCEPTION
    WHEN OTHERS THEN
        -- Fatal error outside the per-file handler (e.g. configuration lookup or
        -- listing failure): log context and re-raise so the deployment runner fails.
        DBMS_OUTPUT.PUT_LINE('');
        DBMS_OUTPUT.PUT_LINE('FATAL ERROR: ' || SQLERRM);
        DBMS_OUTPUT.PUT_LINE('Error occurred at: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
        RAISE;
END;
/
|
||||
@@ -0,0 +1,98 @@
|
||||
-- =====================================================================================
-- Script: 97_MARS_826_rollback_QR_ADJ_tables.sql
-- Purpose: Rollback QR_ADJ tables export - delete files from HIST bucket
-- Author: Grzegorz Michalski
-- Created: 2025-12-02
-- MARS Issue: MARS-826
-- Target Tables:
--   - OU_LM.QR_ADJ_HEADER      (~3,121 records)
--   - OU_LM.QR_ADJ_ITEM        (~29,576 records)
--   - OU_LM.QR_ADJ_ITEM_HEADER (~29,876 records)
-- Target Location: mrds_hist_dev/ARCHIVE/LM/LM_QRE_ADJUSTMENTS_*
-- =====================================================================================

SET SERVEROUTPUT ON SIZE UNLIMITED;
SET FEEDBACK ON;
SET VERIFY OFF;

PROMPT =====================================================================================
PROMPT MARS-826 Rollback: Deleting QR_ADJ table exports from HIST bucket
PROMPT =====================================================================================

DECLARE
    -- Object Storage access, resolved from project configuration packages.
    vBucketUri      VARCHAR2(500);
    vCredentialName VARCHAR2(100);

    -- Counters: per-folder (reset each iteration) and script-wide totals.
    vFileCount      NUMBER := 0;   -- parquet files found in the current folder
    vDeletedCount   NUMBER := 0;   -- files successfully deleted in the current folder
    vTotalDeleted   NUMBER := 0;   -- files deleted across all folders
    vTotalErrors    NUMBER := 0;   -- delete failures across all folders

    TYPE t_folder_list IS TABLE OF VARCHAR2(200);
    vFolders        t_folder_list;
BEGIN
    -- Get bucket URI and credential from FILE_MANAGER configuration
    vBucketUri      := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
    vCredentialName := CT_MRDS.ENV_MANAGER.gvCredentialName;

    DBMS_OUTPUT.PUT_LINE('START TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
    DBMS_OUTPUT.PUT_LINE('');

    -- Folders written by the QR_ADJ export scripts (note QRE naming in the bucket).
    vFolders := t_folder_list(
        'ARCHIVE/LM/LM_QRE_ADJUSTMENTS_HEADER/',
        'ARCHIVE/LM/LM_QRE_ADJUSTMENTS_ITEM/',
        'ARCHIVE/LM/LM_QRE_ADJUSTMENTS_ITEM_HEADER/'
    );

    -- Process each folder
    FOR i IN 1..vFolders.COUNT LOOP
        DBMS_OUTPUT.PUT_LINE('Processing folder: ' || vFolders(i));
        vFileCount    := 0;
        vDeletedCount := 0;

        -- List and delete all Parquet files in the folder.
        -- NOTE(review): LIKE '%.parquet' is case-sensitive; assumes the exporter
        -- writes lowercase extensions - confirm against DATA_EXPORTER file naming.
        FOR rec IN (
            SELECT object_name
              FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
                       credential_name => vCredentialName,
                       location_uri    => vBucketUri || vFolders(i)
                   ))
             WHERE object_name LIKE '%.parquet'
             ORDER BY object_name
        ) LOOP
            vFileCount := vFileCount + 1;

            BEGIN
                -- Delete the Parquet file
                DBMS_CLOUD.DELETE_OBJECT(
                    credential_name => vCredentialName,
                    object_uri      => vBucketUri || vFolders(i) || rec.object_name
                );

                vDeletedCount := vDeletedCount + 1;
                DBMS_OUTPUT.PUT_LINE('  [' || vDeletedCount || '] Deleted: ' || rec.object_name);
            EXCEPTION
                WHEN OTHERS THEN
                    -- One failed delete must not abort the whole rollback, but it is
                    -- counted so the final status can report partial failure instead
                    -- of unconditionally claiming success.
                    vTotalErrors := vTotalErrors + 1;
                    DBMS_OUTPUT.PUT_LINE('  ERROR deleting ' || rec.object_name || ': ' || SQLERRM);
            END;
        END LOOP;

        vTotalDeleted := vTotalDeleted + vDeletedCount;
        DBMS_OUTPUT.PUT_LINE('Folder summary: Found ' || vFileCount || ' files, deleted ' || vDeletedCount || ' files');
        DBMS_OUTPUT.PUT_LINE('');
    END LOOP;

    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    IF vTotalErrors = 0 THEN
        DBMS_OUTPUT.PUT_LINE('ROLLBACK COMPLETE: Total files deleted: ' || vTotalDeleted);
    ELSE
        -- Fix: previously the script always printed "ROLLBACK COMPLETE" even when
        -- individual deletes had failed; surface partial failure explicitly.
        DBMS_OUTPUT.PUT_LINE('ROLLBACK INCOMPLETE: Deleted ' || vTotalDeleted ||
                             ' file(s), ' || vTotalErrors || ' delete error(s) - rerun or clean up manually');
    END IF;
    DBMS_OUTPUT.PUT_LINE('END TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');

EXCEPTION
    WHEN OTHERS THEN
        -- Fatal error outside the per-file handler (e.g. configuration lookup or
        -- listing failure): log context and re-raise so the deployment runner fails.
        DBMS_OUTPUT.PUT_LINE('');
        DBMS_OUTPUT.PUT_LINE('FATAL ERROR: ' || SQLERRM);
        DBMS_OUTPUT.PUT_LINE('Error occurred at: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
        RAISE;
END;
/
|
||||
@@ -0,0 +1,96 @@
|
||||
-- =====================================================================================
-- Script: 98_MARS_826_rollback_TTS_tables.sql
-- Purpose: Rollback TTS tables export - delete files from HIST bucket
-- Author: Grzegorz Michalski
-- Created: 2025-12-02
-- MARS Issue: MARS-826
-- Target Tables:
--   - OU_LM.TTS_HEADER (~56 records)
--   - OU_LM.TTS_ITEM   (~1,064 records)
-- Target Location: mrds_hist_dev/ARCHIVE/LM/LM_TTS_*
-- =====================================================================================

SET SERVEROUTPUT ON SIZE UNLIMITED;
SET FEEDBACK ON;
SET VERIFY OFF;

PROMPT =====================================================================================
PROMPT MARS-826 Rollback: Deleting TTS table exports from HIST bucket
PROMPT =====================================================================================

DECLARE
    -- Object Storage access, resolved from project configuration packages.
    vBucketUri      VARCHAR2(500);
    vCredentialName VARCHAR2(100);

    -- Counters: per-folder (reset each iteration) and script-wide totals.
    vFileCount      NUMBER := 0;   -- parquet files found in the current folder
    vDeletedCount   NUMBER := 0;   -- files successfully deleted in the current folder
    vTotalDeleted   NUMBER := 0;   -- files deleted across all folders
    vTotalErrors    NUMBER := 0;   -- delete failures across all folders

    TYPE t_folder_list IS TABLE OF VARCHAR2(200);
    vFolders        t_folder_list;
BEGIN
    -- Get bucket URI and credential from FILE_MANAGER configuration
    vBucketUri      := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
    vCredentialName := CT_MRDS.ENV_MANAGER.gvCredentialName;

    DBMS_OUTPUT.PUT_LINE('START TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
    DBMS_OUTPUT.PUT_LINE('');

    -- Folders written by the TTS export scripts.
    vFolders := t_folder_list(
        'ARCHIVE/LM/LM_TTS_HEADER/',
        'ARCHIVE/LM/LM_TTS_ITEM/'
    );

    -- Process each folder
    FOR i IN 1..vFolders.COUNT LOOP
        DBMS_OUTPUT.PUT_LINE('Processing folder: ' || vFolders(i));
        vFileCount    := 0;
        vDeletedCount := 0;

        -- List and delete all Parquet files in the folder.
        -- NOTE(review): LIKE '%.parquet' is case-sensitive; assumes the exporter
        -- writes lowercase extensions - confirm against DATA_EXPORTER file naming.
        FOR rec IN (
            SELECT object_name
              FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
                       credential_name => vCredentialName,
                       location_uri    => vBucketUri || vFolders(i)
                   ))
             WHERE object_name LIKE '%.parquet'
             ORDER BY object_name
        ) LOOP
            vFileCount := vFileCount + 1;

            BEGIN
                -- Delete the Parquet file
                DBMS_CLOUD.DELETE_OBJECT(
                    credential_name => vCredentialName,
                    object_uri      => vBucketUri || vFolders(i) || rec.object_name
                );

                vDeletedCount := vDeletedCount + 1;
                DBMS_OUTPUT.PUT_LINE('  [' || vDeletedCount || '] Deleted: ' || rec.object_name);
            EXCEPTION
                WHEN OTHERS THEN
                    -- One failed delete must not abort the whole rollback, but it is
                    -- counted so the final status can report partial failure instead
                    -- of unconditionally claiming success.
                    vTotalErrors := vTotalErrors + 1;
                    DBMS_OUTPUT.PUT_LINE('  ERROR deleting ' || rec.object_name || ': ' || SQLERRM);
            END;
        END LOOP;

        vTotalDeleted := vTotalDeleted + vDeletedCount;
        DBMS_OUTPUT.PUT_LINE('Folder summary: Found ' || vFileCount || ' files, deleted ' || vDeletedCount || ' files');
        DBMS_OUTPUT.PUT_LINE('');
    END LOOP;

    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    IF vTotalErrors = 0 THEN
        DBMS_OUTPUT.PUT_LINE('ROLLBACK COMPLETE: Total files deleted: ' || vTotalDeleted);
    ELSE
        -- Fix: previously the script always printed "ROLLBACK COMPLETE" even when
        -- individual deletes had failed; surface partial failure explicitly.
        DBMS_OUTPUT.PUT_LINE('ROLLBACK INCOMPLETE: Deleted ' || vTotalDeleted ||
                             ' file(s), ' || vTotalErrors || ' delete error(s) - rerun or clean up manually');
    END IF;
    DBMS_OUTPUT.PUT_LINE('END TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');

EXCEPTION
    WHEN OTHERS THEN
        -- Fatal error outside the per-file handler (e.g. configuration lookup or
        -- listing failure): log context and re-raise so the deployment runner fails.
        DBMS_OUTPUT.PUT_LINE('');
        DBMS_OUTPUT.PUT_LINE('FATAL ERROR: ' || SQLERRM);
        DBMS_OUTPUT.PUT_LINE('Error occurred at: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
        RAISE;
END;
/
|
||||
@@ -0,0 +1,155 @@
|
||||
-- =====================================================================================
-- Script: 99_MARS_826_verify_rollback.sql
-- Purpose: Verify complete cleanup of MARS-826 exported files from HIST bucket
-- Author: Grzegorz Michalski
-- Created: 2025-12-02
-- Updated: 2025-12-15
-- MARS Issue: MARS-826
-- Target Location: mrds_hist_dev/ARCHIVE/LM/
-- =====================================================================================

SET SERVEROUTPUT ON SIZE UNLIMITED;
SET FEEDBACK ON;
SET VERIFY OFF;
SET LINESIZE 200;

PROMPT =====================================================================================
PROMPT MARS-826 Rollback Verification: Complete Cleanup Check
PROMPT =====================================================================================

DECLARE
    -- Object Storage access, resolved from project configuration packages.
    vBucketUri       VARCHAR2(500);
    vCredentialName  VARCHAR2(100);

    -- Verification counters.
    vFileCount       NUMBER  := 0;     -- parquet files found in the current folder
    vTotalFiles      NUMBER  := 0;     -- remaining files across all folders
    vTablesWithFiles NUMBER  := 0;     -- folders still holding parquet files
    vTablesClean     NUMBER  := 0;     -- folders verified empty
    vAccessErrors    NUMBER  := 0;     -- folders whose listing raised an error
    vListOk          BOOLEAN := TRUE;  -- did the listing of the current folder succeed?
    vRollbackSuccess BOOLEAN := TRUE;

    TYPE t_folder_list IS TABLE OF VARCHAR2(200);
    vFolders         t_folder_list;

    TYPE t_table_list IS TABLE OF VARCHAR2(100);
    vTables          t_table_list;
BEGIN
    -- Get bucket URI and credential from FILE_MANAGER configuration
    vBucketUri      := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
    vCredentialName := CT_MRDS.ENV_MANAGER.gvCredentialName;

    DBMS_OUTPUT.PUT_LINE('VERIFICATION TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
    DBMS_OUTPUT.PUT_LINE('Bucket URI: ' || vBucketUri);
    DBMS_OUTPUT.PUT_LINE('Checking 19 MARS-826 export folders for remaining files...');
    DBMS_OUTPUT.PUT_LINE('');

    -- Folder list for all 19 exported tables; vTables holds the matching logical
    -- table names at the same indexes (the two collections must stay in lockstep).
    vFolders := t_folder_list(
        'ARCHIVE/LM/LM_ADHOC_ADJUSTMENTS_HEADER/',
        'ARCHIVE/LM/LM_ADHOC_ADJUSTMENTS_ITEM/',
        'ARCHIVE/LM/LM_ADHOC_ADJUSTMENTS_ITEM_HEADER/',
        'ARCHIVE/LM/LM_BALANCESHEET_HEADER/',
        'ARCHIVE/LM/LM_BALANCESHEET_ITEM/',
        'ARCHIVE/LM/LM_CSM_ADJUSTMENTS_HEADER/',
        'ARCHIVE/LM/LM_CSM_ADJUSTMENTS_ITEM/',
        'ARCHIVE/LM/LM_CSM_ADJUSTMENTS_ITEM_HEADER/',
        'ARCHIVE/LM/LM_STANDING_FACILITIES/',
        'ARCHIVE/LM/LM_STANDING_FACILITIES_HEADER/',
        'ARCHIVE/LM/LM_CURRENT_ACCOUNTS_HEADER/',
        'ARCHIVE/LM/LM_CURRENT_ACCOUNTS_ITEM/',
        'ARCHIVE/LM/LM_FORECAST_HEADER/',
        'ARCHIVE/LM/LM_FORECAST_ITEM/',
        'ARCHIVE/LM/LM_QRE_ADJUSTMENTS_HEADER/',
        'ARCHIVE/LM/LM_QRE_ADJUSTMENTS_ITEM/',
        'ARCHIVE/LM/LM_QRE_ADJUSTMENTS_ITEM_HEADER/',
        'ARCHIVE/LM/LM_TTS_HEADER/',
        'ARCHIVE/LM/LM_TTS_ITEM/'
    );

    vTables := t_table_list(
        'ADHOC_ADJ_HEADER',
        'ADHOC_ADJ_ITEM',
        'ADHOC_ADJ_ITEM_HEADER',
        'BALANCESHEET_HEADER',
        'BALANCESHEET_ITEM',
        'CSM_ADJ_HEADER',
        'CSM_ADJ_ITEM',
        'CSM_ADJ_ITEM_HEADER',
        'STANDING_FACILITY',
        'STANDING_FACILITY_HEADER',
        'MRR_IND_CURRENT_ACCOUNT_HEADER',
        'MRR_IND_CURRENT_ACCOUNT_ITEM',
        'FORECAST_HEADER',
        'FORECAST_ITEM',
        'QR_ADJ_HEADER',
        'QR_ADJ_ITEM',
        'QR_ADJ_ITEM_HEADER',
        'TTS_HEADER',
        'TTS_ITEM'
    );

    -- Check each folder for remaining files
    FOR i IN 1..vFolders.COUNT LOOP
        vFileCount := 0;
        vListOk    := TRUE;

        BEGIN
            FOR rec IN (
                SELECT object_name, bytes
                  FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
                           credential_name => vCredentialName,
                           location_uri    => vBucketUri || vFolders(i)
                       ))
                 WHERE object_name LIKE '%.parquet'
                 ORDER BY object_name
            ) LOOP
                -- Print the folder banner only once, on the first leftover file.
                IF vFileCount = 0 THEN
                    DBMS_OUTPUT.PUT_LINE('ISSUE: ' || RPAD(vTables(i), 40) || ' - Files still exist:');
                    vRollbackSuccess := FALSE;
                    vTablesWithFiles := vTablesWithFiles + 1;
                END IF;

                vFileCount := vFileCount + 1;
                DBMS_OUTPUT.PUT_LINE('  [' || vFileCount || '] ' || rec.object_name || ' (' || ROUND(rec.bytes/1024, 1) || ' KB)');
            END LOOP;
        EXCEPTION
            WHEN OTHERS THEN
                -- Fix: this handler previously swallowed errors with NULL and the
                -- folder was then reported as "Clean", so a credential or bucket
                -- access failure produced a false "ROLLBACK SUCCESSFUL". An empty
                -- prefix is presumably returned as zero rows rather than an error
                -- (TODO confirm against DBMS_CLOUD behaviour), so an exception here
                -- is more likely a real access problem - report and count it.
                vListOk       := FALSE;
                vAccessErrors := vAccessErrors + 1;
                DBMS_OUTPUT.PUT_LINE('WARN: ' || RPAD(vTables(i), 40) || ' - could not list folder: ' || SQLERRM);
        END;

        IF NOT vListOk THEN
            NULL;  -- already reported as WARN; counted neither clean nor dirty
        ELSIF vFileCount = 0 THEN
            DBMS_OUTPUT.PUT_LINE('OK: ' || RPAD(vTables(i), 40) || ' - Clean (no files)');
            vTablesClean := vTablesClean + 1;
        ELSE
            vTotalFiles := vTotalFiles + vFileCount;
        END IF;
    END LOOP;

    DBMS_OUTPUT.PUT_LINE('');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    DBMS_OUTPUT.PUT_LINE('ROLLBACK VERIFICATION SUMMARY:');
    DBMS_OUTPUT.PUT_LINE('  Total tables checked: ' || vTables.COUNT);
    DBMS_OUTPUT.PUT_LINE('  Tables completely clean: ' || vTablesClean);
    DBMS_OUTPUT.PUT_LINE('  Tables with remaining files: ' || vTablesWithFiles);
    DBMS_OUTPUT.PUT_LINE('  Total remaining files: ' || vTotalFiles);
    DBMS_OUTPUT.PUT_LINE('  Folders not listable: ' || vAccessErrors);
    DBMS_OUTPUT.PUT_LINE('');

    IF vRollbackSuccess AND vAccessErrors = 0 THEN
        DBMS_OUTPUT.PUT_LINE('RESULT: ROLLBACK SUCCESSFUL');
        DBMS_OUTPUT.PUT_LINE('STATUS: All MARS-826 exported files have been completely removed');
        DBMS_OUTPUT.PUT_LINE('ACTION: No further action required - rollback is complete');
    ELSIF vRollbackSuccess THEN
        -- No leftover files were seen, but some folders could not be checked at
        -- all, so "successful" cannot honestly be claimed.
        DBMS_OUTPUT.PUT_LINE('RESULT: VERIFICATION INCONCLUSIVE');
        DBMS_OUTPUT.PUT_LINE('STATUS: ' || vAccessErrors || ' folder(s) could not be listed');
        DBMS_OUTPUT.PUT_LINE('ACTION: Check credential and bucket access, then re-run verification');
    ELSE
        DBMS_OUTPUT.PUT_LINE('RESULT: ROLLBACK INCOMPLETE');
        DBMS_OUTPUT.PUT_LINE('STATUS: ' || vTotalFiles || ' files remain in ' || vTablesWithFiles || ' folder(s)');
        DBMS_OUTPUT.PUT_LINE('ACTION: 1. Review rollback scripts for completeness');
        DBMS_OUTPUT.PUT_LINE('        2. Manual cleanup may be required for remaining files');
        DBMS_OUTPUT.PUT_LINE('        3. Check file permissions and bucket access');
    END IF;
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');

EXCEPTION
    WHEN OTHERS THEN
        -- Fatal error outside the per-folder handler (e.g. configuration lookup):
        -- log and re-raise so the runner sees the failure.
        DBMS_OUTPUT.PUT_LINE('');
        DBMS_OUTPUT.PUT_LINE('VERIFICATION ERROR: ' || SQLERRM);
        RAISE;
END;
/
|
||||
301
MARS_Packages/REL01_POST_DEACTIVATION/MARS-826/README.md
Normal file
301
MARS_Packages/REL01_POST_DEACTIVATION/MARS-826/README.md
Normal file
@@ -0,0 +1,301 @@
|
||||
# MARS-826: CSDB Historical Data Export to HIST Bucket
|
||||
|
||||
## 🎯 Implementation Status: 🚧 IN DEVELOPMENT
|
||||
|
||||
**Implementation Date:** 2025-12-02
|
||||
**Database Version:** Oracle 23ai
|
||||
**Package Used:** CT_MRDS.DATA_EXPORTER
|
||||
**Status:** Development ⚙️
|
||||
|
||||
---
|
||||
|
||||
## 📋 Overview
|
||||
|
||||
MARS-826 implements one-time historical data export for CSDB tables from operational database (OU_LM/OU_MRR schemas) to HIST bucket (ARCHIVE) in Parquet format with Hive-style partitioning.
|
||||
|
||||
### Key Objectives
|
||||
- **Data Migration**: Move historical CSDB data from LEGACY_ tables to HIST bucket
|
||||
- **Column Mapping**: Map A_ETL_LOAD_SET_KEY[_FK] → A_WORKFLOW_HISTORY_KEY
|
||||
- **Bulk Export**: Process 19 tables with ~31M total records
|
||||
- **Weekend Execution**: Non-critical timing allows weekend bulk processing
|
||||
- **Prerequisites**: LEGACY_ prefixed tables must exist before running export
|
||||
|
||||
---
|
||||
|
||||
## 📁 Project Structure
|
||||
|
||||
```
|
||||
MARS_Packages/REL01_POST_DEACTIVATION/MARS-826/
|
||||
├── install_mars826.sql # 📥 Main Installation Script
|
||||
├── 00_MARS_826_pre_check_existing_files.sql # Pre-check: Existing archive files
|
||||
├── 01_MARS_826_export_ADHOC_ADJ_tables.sql # Export ADHOC_ADJ (3 tables)
|
||||
├── 02_MARS_826_export_BALANCESHEET_tables.sql # Export BALANCESHEET (2 tables, 7.6M records)
|
||||
├── 03_MARS_826_export_CSM_ADJ_tables.sql # Export CSM_ADJ (3 tables)
|
||||
├── 04_MARS_826_export_STANDING_FACILITY_tables.sql # Export STANDING_FACILITY (2 tables, 1.2M records)
|
||||
├── 05_MARS_826_export_MRR_IND_CURRENT_ACCOUNT_tables.sql # Export MRR_IND_CURR_ACC (2 tables)
|
||||
├── 06_MARS_826_export_FORECAST_tables.sql # Export FORECAST (2 tables, 21.6M records)
|
||||
├── 07_MARS_826_export_QR_ADJ_tables.sql # Export QR_ADJ (3 tables)
|
||||
├── 08_MARS_826_export_TTS_tables.sql # Export TTS (2 tables)
|
||||
├── 09_MARS_826_verify_exports.sql # Verification: File listing
|
||||
├── 10_MARS_826_verify_record_counts.sql # Verification: Record count comparison
|
||||
├── rollback_mars826.sql # 🔄 Main Rollback Script
|
||||
├── 91_MARS_826_rollback_ADHOC_ADJ_tables.sql # Rollback ADHOC_ADJ
|
||||
├── 92_MARS_826_rollback_BALANCESHEET_tables.sql # Rollback BALANCESHEET
|
||||
├── 93_MARS_826_rollback_CSM_ADJ_tables.sql # Rollback CSM_ADJ
|
||||
├── 94_MARS_826_rollback_STANDING_FACILITY_tables.sql # Rollback STANDING_FACILITY
|
||||
├── 95_MARS_826_rollback_MRR_IND_CURR_ACC_tables.sql # Rollback MRR_IND_CURR_ACC
|
||||
├── 96_MARS_826_rollback_FORECAST_tables.sql # Rollback FORECAST
|
||||
├── 97_MARS_826_rollback_QR_ADJ_tables.sql # Rollback QR_ADJ
|
||||
├── 98_MARS_826_rollback_TTS_tables.sql # Rollback TTS
|
||||
├── 99_MARS_826_verify_rollback.sql # Rollback Verification
|
||||
└── README.md # 📝 This Documentation
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 📊 Tables to Export (19 Total)
|
||||
|
||||
### Group 1: ADHOC_ADJ Tables (3 tables, ~209 records)
|
||||
1. **OU_LM.LEGACY_ADHOC_ADJ_HEADER** → history/ARCHIVE/LM/LM_ADHOC_ADJUSTMENTS_HEADER (~5 records)
|
||||
2. **OU_LM.LEGACY_ADHOC_ADJ_ITEM** → history/ARCHIVE/LM/LM_ADHOC_ADJUSTMENTS_ITEM (~102 records)
|
||||
3. **OU_LM.LEGACY_ADHOC_ADJ_ITEM_HEADER** → history/ARCHIVE/LM/LM_ADHOC_ADJUSTMENTS_ITEM_HEADER (~102 records)
|
||||
|
||||
### Group 2: BALANCESHEET Tables (2 tables, ~7.6M records) ⚠️ LARGE
|
||||
4. **OU_LM.LEGACY_BALANCESHEET_HEADER** → history/ARCHIVE/LM/LM_BALANCESHEET_HEADER (~81,853 records)
|
||||
5. **OU_LM.LEGACY_BALANCESHEET_ITEM** → history/ARCHIVE/LM/LM_BALANCESHEET_ITEM (~7,603,340 records)
|
||||
|
||||
### Group 3: CSM_ADJ Tables (3 tables, ~7,756 records)
|
||||
6. **OU_LM.LEGACY_CSM_ADJ_HEADER** → history/ARCHIVE/LM/LM_CSM_ADJUSTMENTS_HEADER (~186 records)
|
||||
7. **OU_LM.LEGACY_CSM_ADJ_ITEM** → history/ARCHIVE/LM/LM_CSM_ADJUSTMENTS_ITEM (~3,785 records)
|
||||
8. **OU_LM.LEGACY_CSM_ADJ_ITEM_HEADER** → history/ARCHIVE/LM/LM_CSM_ADJUSTMENTS_ITEM_HEADER (~3,785 records)
|
||||
|
||||
### Group 4: STANDING_FACILITY Tables (2 tables, ~1.2M records) ⚠️ LARGE
|
||||
9. **OU_LM.LEGACY_STANDING_FACILITY** → history/ARCHIVE/LM/LM_STANDING_FACILITIES (~1,205,002 records)
|
||||
10. **OU_LM.LEGACY_STANDING_FACILITY_HEADER** → history/ARCHIVE/LM/LM_STANDING_FACILITIES_HEADER (~2,647 records)
|
||||
|
||||
### Group 5: MRR_IND_CURR_ACC Tables (2 tables - OU_MRR schema)
|
||||
11. **OU_MRR.LEGACY_MRR_IND_CURRENT_ACCOUNT_HEADER** → history/ARCHIVE/LM/LM_CURRENT_ACCOUNTS_HEADER
|
||||
12. **OU_MRR.LEGACY_MRR_IND_CURRENT_ACCOUNT_ITEM** → history/ARCHIVE/LM/LM_CURRENT_ACCOUNTS_ITEM
|
||||
|
||||
### Group 6: FORECAST Tables (2 tables, ~21.6M records) ⚠️ VERY LARGE
|
||||
13. **OU_LM.LEGACY_FORECAST_HEADER** → history/ARCHIVE/LM/LM_FORECAST_HEADER (~42,504 records)
|
||||
14. **OU_LM.LEGACY_FORECAST_ITEM** → history/ARCHIVE/LM/LM_FORECAST_ITEM (~21,643,855 records)
|
||||
|
||||
### Group 7: QR_ADJ Tables (3 tables, ~62,573 records)
|
||||
15. **OU_LM.LEGACY_QR_ADJ_HEADER** → history/ARCHIVE/LM/LM_QRE_ADJUSTMENTS_HEADER (~123 records)
|
||||
16. **OU_LM.LEGACY_QR_ADJ_ITEM** → history/ARCHIVE/LM/LM_QRE_ADJUSTMENTS_ITEM (~59,952 records)
|
||||
17. **OU_LM.LEGACY_QR_ADJ_ITEM_HEADER** → history/ARCHIVE/LM/LM_QRE_ADJUSTMENTS_ITEM_HEADER (~2,498 records)
|
||||
|
||||
### Group 8: TTS Tables (2 tables, ~1,120 records)
|
||||
18. **OU_LM.LEGACY_TTS_HEADER** → history/ARCHIVE/LM/LM_TTS_HEADER (~560 records)
|
||||
19. **OU_LM.LEGACY_TTS_ITEM** → history/ARCHIVE/LM/LM_TTS_ITEM (~560 records)
|
||||
|
||||
**Total Records:** ~31,000,000 records across 19 tables
|
||||
|
||||
### Complete Table List
|
||||
1. OU_LM.LEGACY_ADHOC_ADJ_HEADER
|
||||
2. OU_LM.LEGACY_ADHOC_ADJ_ITEM
|
||||
3. OU_LM.LEGACY_ADHOC_ADJ_ITEM_HEADER
|
||||
4. OU_LM.LEGACY_BALANCESHEET_HEADER
|
||||
5. OU_LM.LEGACY_BALANCESHEET_ITEM
|
||||
6. OU_LM.LEGACY_CSM_ADJ_HEADER
|
||||
7. OU_LM.LEGACY_CSM_ADJ_ITEM
|
||||
8. OU_LM.LEGACY_CSM_ADJ_ITEM_HEADER
|
||||
9. OU_LM.LEGACY_STANDING_FACILITY
|
||||
10. OU_LM.LEGACY_STANDING_FACILITY_HEADER
|
||||
11. OU_MRR.LEGACY_MRR_IND_CURRENT_ACCOUNT_HEADER
|
||||
12. OU_MRR.LEGACY_MRR_IND_CURRENT_ACCOUNT_ITEM
|
||||
13. OU_LM.LEGACY_FORECAST_HEADER
|
||||
14. OU_LM.LEGACY_FORECAST_ITEM
|
||||
15. OU_LM.LEGACY_QR_ADJ_HEADER
|
||||
16. OU_LM.LEGACY_QR_ADJ_ITEM
|
||||
17. OU_LM.LEGACY_QR_ADJ_ITEM_HEADER
|
||||
18. OU_LM.LEGACY_TTS_HEADER
|
||||
19. OU_LM.LEGACY_TTS_ITEM
|
||||
|
||||
---
|
||||
|
||||
## 🔄 Column Mapping
|
||||
|
||||
All tables require mapping of the key column used for partitioning:
|
||||
|
||||
| Old Column Name | New Column Name | Tables Affected |
|
||||
|----------------|-----------------|-----------------|
|
||||
| `A_ETL_LOAD_SET_KEY_FK` | `A_WORKFLOW_HISTORY_KEY` | ADHOC_ADJ (3), CSM_ADJ (3), STANDING_FACILITY (2), FORECAST (2), QR_ADJ (3), TTS (2) |
|
||||
| `A_ETL_LOAD_SET_KEY` | `A_WORKFLOW_HISTORY_KEY` | BALANCESHEET (2), MRR_IND_CURR_ACC (2) |
|
||||
|
||||
**Mapping Implementation:**
|
||||
```sql
|
||||
-- Example for tables with A_ETL_LOAD_SET_KEY_FK
|
||||
pColumnList => 'T.COL1, T.COL2, ..., T.A_ETL_LOAD_SET_KEY_FK AS A_WORKFLOW_HISTORY_KEY'
|
||||
|
||||
-- Example for tables with A_ETL_LOAD_SET_KEY
|
||||
pColumnList => 'T.COL1, T.COL2, ..., T.A_ETL_LOAD_SET_KEY AS A_WORKFLOW_HISTORY_KEY'
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 📥 Installation
|
||||
|
||||
### Prerequisites
|
||||
- Oracle Database 23ai with CT_MRDS.DATA_EXPORTER package
|
||||
- Access to OU_LM and OU_MRR schemas
|
||||
- HIST bucket (history) configured and accessible
|
||||
- Sufficient storage space for ~31M records in Parquet format
|
||||
|
||||
### Execution Requirements
|
||||
- **Timing:** Weekend bulk execution (non-time-critical)
|
||||
- **Duration:** Estimated 2-4 hours depending on system load
|
||||
- **Connection:** Execute as ADMIN user for cross-schema operations
|
||||
|
||||
### Installation Steps
|
||||
|
||||
```powershell
|
||||
# Execute installation script
|
||||
Get-Content "MARS_Packages/REL01_POST_DEACTIVATION/MARS-826/install_mars826.sql" | sql "ADMIN/password@service"
|
||||
|
||||
# Log file will be created: INSTALL_MARS_826_<PDB>_<timestamp>.log
|
||||
```
|
||||
|
||||
**Installation Script Workflow:**
|
||||
1. **01_** Export ADHOC_ADJ tables (3 tables)
|
||||
2. **02_** Export BALANCESHEET tables (2 tables, ~7.6M records) ⏱️
|
||||
3. **03_** Export CSM_ADJ tables (3 tables)
|
||||
4. **04_** Export STANDING_FACILITY tables (2 tables, ~1.2M records) ⏱️
|
||||
5. **05_** Export MRR_IND_CURR_ACC tables (2 tables)
|
||||
6. **06_** Export FORECAST tables (2 tables, ~21.6M records) ⏱️⏱️
|
||||
7. **07_** Export QR_ADJ tables (3 tables)
|
||||
8. **08_** Export TTS tables (2 tables)
|
||||
9. **09_** Verify all exports completed successfully
|
||||
|
||||
---
|
||||
|
||||
## 🧪 Verification
|
||||
|
||||
The installation includes comprehensive automated verification:
|
||||
|
||||
### Pre-Installation Check (00_*)
|
||||
- **Existing Files Audit**: Lists all current Parquet files in HIST bucket before export
|
||||
- **Storage Analysis**: Shows file counts, total size (MB), and status for each table
|
||||
- **Baseline Establishment**: Determines if export is fresh or will overwrite existing data
|
||||
|
||||
### Post-Export Verification (09_* and 10_*)
|
||||
|
||||
**File Verification (09_)**:
|
||||
```sql
|
||||
-- Automated check of exported files in HIST bucket
|
||||
-- Groups by table name with file counts and sizes
|
||||
-- Verifies Hive-style partitioning structure
|
||||
```
|
||||
|
||||
**Record Count Validation (10_)**:
|
||||
```sql
|
||||
-- Compares source table row counts with exported Parquet files
|
||||
-- Shows schema, table name, source records, file count, and size
|
||||
-- Validates data completeness for all 19 tables
|
||||
```
|
||||
|
||||
### Manual Verification
|
||||
```sql
|
||||
-- Check HIST bucket for exported files
|
||||
SELECT object_name, bytes, time_created
|
||||
FROM DBMS_CLOUD.LIST_OBJECTS(
|
||||
credential_name => 'DEF_CRED_ARN',
|
||||
location_uri => 'https://objectstorage.eu-frankfurt-1.oraclecloud.com/n/frtgjxu7zl7c/b/history/'
|
||||
)
|
||||
WHERE object_name LIKE 'ARCHIVE/LM/%'
|
||||
ORDER BY time_created DESC;
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 📝 Expected Output
|
||||
|
||||
### HIST Bucket Structure (Parquet with Hive Partitioning)
|
||||
```
|
||||
history/ARCHIVE/LM/
|
||||
├── LM_ADHOC_ADJUSTMENTS_HEADER/
|
||||
│ └── PARTITION_YEAR=YYYY/PARTITION_MONTH=MM/*.parquet
|
||||
├── LM_BALANCESHEET_ITEM/
|
||||
│ └── PARTITION_YEAR=YYYY/PARTITION_MONTH=MM/*.parquet (7.6M records)
|
||||
├── LM_FORECAST_ITEM/
|
||||
│ └── PARTITION_YEAR=YYYY/PARTITION_MONTH=MM/*.parquet (21.6M records)
|
||||
└── ... (15 more table folders)
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## ⚠️ Important Notes
|
||||
|
||||
### Performance Considerations
|
||||
- **Large Tables:** BALANCESHEET_ITEM (7.6M), FORECAST_ITEM (21.6M) may take significant time
|
||||
- **Weekend Execution:** Recommended to avoid impact on production workloads
|
||||
- **Monitoring:** Check log file for progress and any errors
|
||||
|
||||
### Data Validation
|
||||
- **Column Mapping:** Verify A_WORKFLOW_HISTORY_KEY is correctly mapped
|
||||
- **Record Counts:** Compare source vs. exported record counts
|
||||
- **Date Ranges:** Ensure all historical data is included
|
||||
|
||||
---
|
||||
|
||||
## 🔄 Rollback
|
||||
|
||||
This package includes comprehensive rollback scripts to restore the state before installation by **deleting all exported Parquet files from HIST bucket**:
|
||||
|
||||
### Rollback Scripts
|
||||
- `rollback_mars826.sql` - Master rollback orchestration script (with SPOOL logging)
|
||||
- `91_MARS_826_rollback_ADHOC_ADJ_tables.sql` - Delete ADHOC_ADJ exports (3 tables)
|
||||
- `92_MARS_826_rollback_BALANCESHEET_tables.sql` - Delete BALANCESHEET exports (2 tables, ~7.6M records)
|
||||
- `93_MARS_826_rollback_CSM_ADJ_tables.sql` - Delete CSM_ADJ exports (3 tables)
|
||||
- `94_MARS_826_rollback_STANDING_FACILITY_tables.sql` - Delete STANDING_FACILITY exports (2 tables, ~1.2M records)
|
||||
- `95_MARS_826_rollback_MRR_IND_CURRENT_ACCOUNT_tables.sql` - Delete MRR_IND_CURR_ACC exports (2 tables)
|
||||
- `96_MARS_826_rollback_FORECAST_tables.sql` - Delete FORECAST exports (2 tables, ~21.6M records - LARGEST)
|
||||
- `97_MARS_826_rollback_QR_ADJ_tables.sql` - Delete QR_ADJ exports (3 tables)
|
||||
- `98_MARS_826_rollback_TTS_tables.sql` - Delete TTS exports (2 tables)
|
||||
- `99_MARS_826_verify_rollback.sql` - Verify complete file deletion
|
||||
|
||||
### Rollback Execution
|
||||
```sql
|
||||
-- Execute from SQLcl/SQL*Plus as CT_MRDS user
|
||||
@rollback_mars826.sql
|
||||
-- Prompts for confirmation (type YES to proceed)
|
||||
-- Creates log file: log/ROLLBACK_MARS_826_<PDB>_<timestamp>.log
|
||||
```
|
||||
|
||||
### What Rollback Does
|
||||
1. **Deletes all exported Parquet files** from `HIST/ARCHIVE/LM/{TABLE_NAME}/` folders
|
||||
2. **Uses DBMS_CLOUD.DELETE_OBJECT** for each file in all 19 table directories
|
||||
3. **Verifies cleanup** by counting remaining files (should be 0)
|
||||
4. **Logs all operations** to timestamped log file
|
||||
|
||||
### Important Notes
|
||||
- **Source tables remain untouched** (read-only operations, no data modifications)
|
||||
- **Rollback is destructive** - permanently deletes exported files from HIST bucket
|
||||
- **Large datasets warning** - FORECAST_ITEM deletion may take several minutes (~21.6M records)
|
||||
- **Re-export possible** - can re-run install_mars826.sql after rollback if needed
|
||||
|
||||
---
|
||||
|
||||
## 🔗 Related Documentation
|
||||
|
||||
- [DATA_EXPORTER Package Documentation](../../confluence/DATA_EXPORTER_Guide.md)
|
||||
- [Table Setup Guide for FILE PROCESSOR System](../../confluence/Tables_setup.md)
|
||||
- [File Manager Configuration Guide](../../confluence/FILE_MANAGER_Configuration_Guide.md)
|
||||
|
||||
---
|
||||
|
||||
## 📞 Support
|
||||
|
||||
For questions or issues with the export process:
|
||||
|
||||
1. Check log file: `INSTALL_MARS_826_<PDB>_<timestamp>.log`
|
||||
2. Verify bucket access and credentials
|
||||
3. Check process logs: `SELECT * FROM CT_MRDS.A_PROCESS_LOG WHERE LOG_TIMESTAMP > SYSDATE - 1 ORDER BY LOG_TIMESTAMP DESC;`
|
||||
4. Contact database team lead
|
||||
|
||||
---
|
||||
|
||||
**Last Updated:** 2025-12-02
|
||||
**Document Version:** 1.0.0
|
||||
**Author:** Grzegorz Michalski
|
||||
@@ -0,0 +1,154 @@
|
||||
-- ===================================================================
-- MARS-826 INSTALL SCRIPT: CSDB Historical Data Export to HIST Bucket
-- ===================================================================
-- Purpose: One-time export of 19 CSDB tables from OU_LM/OU_MRR to HIST bucket
-- Author: Grzegorz Michalski
-- Date: 2025-12-02
-- Version: 1.0.0
--
-- Description:
--   Exports historical data from operational CSDB tables to ARCHIVE/LM/
--   in HIST bucket with Parquet format and Hive-style partitioning.
--   Column mapping: A_ETL_LOAD_SET_KEY[_FK] -> A_WORKFLOW_HISTORY_KEY
--
-- Execution: ADMIN user required for cross-schema operations
-- Timing:    Weekend bulk execution recommended (non-time-critical)
-- Duration:  Estimated 2-4 hours for ~31M total records
-- ===================================================================

-- Create log directory if it doesn't exist.
-- NOTE(review): "2>nul" is Windows (cmd.exe) stderr redirection; on a Unix
-- shell this creates a stray file literally named "nul" instead of silencing
-- the error. Confirm the target client platform for this script.
host mkdir log 2>nul

-- Dynamic spool file generation (using SYS_CONTEXT - no DBA privileges required).
-- Log files are automatically created in the log/ subdirectory.
-- Sized 255 (was 100): 'log/INSTALL_MARS_826_' + container name (CON_NAME may
-- be up to 128 bytes) + '_' + 15-char timestamp + '.log' can exceed 100 bytes
-- and would raise ORA-06502 before spooling even starts.
var filename VARCHAR2(255)
BEGIN
    :filename := 'log/INSTALL_MARS_826_' || SYS_CONTEXT('USERENV', 'CON_NAME') || '_' || TO_CHAR(SYSDATE,'YYYYMMDD_HH24MISS') || '.log';
END;
/
column filename new_value _filename
select :filename filename from dual;
spool &_filename

SET ECHO OFF
SET TIMING ON
SET SERVEROUTPUT ON SIZE UNLIMITED
SET PAUSE OFF

PROMPT =========================================================================
PROMPT MARS-826: CSDB Historical Data Export to HIST Bucket
PROMPT =========================================================================
PROMPT
PROMPT This script will export 19 CSDB tables from OU_LM/OU_MRR schemas
PROMPT to HIST bucket in Parquet format.
PROMPT
PROMPT Export Groups:
PROMPT 1. ADHOC_ADJ tables (3 tables, ~209 records)
PROMPT 2. BALANCESHEET tables (2 tables, ~7.6M records) - LARGE
PROMPT 3. CSM_ADJ tables (3 tables, ~7,756 records)
PROMPT 4. STANDING_FACILITY tables (2 tables, ~1.2M records) - LARGE
PROMPT 5. MRR_IND_CURR_ACC tables (2 tables)
PROMPT 6. FORECAST tables (2 tables, ~21.6M records) - VERY LARGE
PROMPT 7. QR_ADJ tables (3 tables, ~62,573 records)
PROMPT 8. TTS tables (2 tables)
PROMPT
PROMPT Total: ~31 million records across 19 tables
PROMPT
PROMPT Column Mapping: A_ETL_LOAD_SET_KEY[_FK] -> A_WORKFLOW_HISTORY_KEY
PROMPT

PROMPT =========================================================================

-- Confirm installation with user.
-- Abort unless the answer is exactly YES (case-insensitive). An empty answer
-- substitutes to '' (NULL in Oracle) and is caught by the IS NULL tests, so
-- plain Enter cannot start the export.
ACCEPT continue CHAR PROMPT 'Type YES to continue with data export, or Ctrl+C to abort: '
WHENEVER SQLERROR EXIT SQL.SQLCODE
BEGIN
    IF '&continue' IS NULL OR TRIM('&continue') IS NULL OR UPPER(TRIM('&continue')) != 'YES' THEN
        RAISE_APPLICATION_ERROR(-20001, 'Installation aborted by user');
    END IF;
END;
/
WHENEVER SQLERROR CONTINUE

PROMPT
PROMPT =========================================================================
PROMPT Pre-Check: Existing Archive Files in HIST Bucket
PROMPT =========================================================================
@@00_MARS_826_pre_check_existing_files.sql

PROMPT
PROMPT =========================================================================
PROMPT Step 1: Export ADHOC_ADJ Tables (3 tables)
PROMPT =========================================================================
@@01_MARS_826_export_ADHOC_ADJ_tables.sql

PROMPT
PROMPT =========================================================================
PROMPT Step 2: Export BALANCESHEET Tables (2 tables, ~7.6M records)
PROMPT =========================================================================
@@02_MARS_826_export_BALANCESHEET_tables.sql

PROMPT
PROMPT =========================================================================
PROMPT Step 3: Export CSM_ADJ Tables (3 tables)
PROMPT =========================================================================
@@03_MARS_826_export_CSM_ADJ_tables.sql

PROMPT
PROMPT =========================================================================
PROMPT Step 4: Export STANDING_FACILITY Tables (2 tables, ~1.2M records)
PROMPT =========================================================================
@@04_MARS_826_export_STANDING_FACILITY_tables.sql

PROMPT
PROMPT =========================================================================
PROMPT Step 5: Export MRR_IND_CURRENT_ACCOUNT Tables (2 tables)
PROMPT =========================================================================
-- NOTE(review): the README lists this file as
-- 05_MARS_826_export_MRR_IND_CURR_ACC_tables.sql — verify the actual filename
-- on disk; a mismatch aborts the whole run at this step.
@@05_MARS_826_export_MRR_IND_CURRENT_ACCOUNT_tables.sql

PROMPT
PROMPT =========================================================================
PROMPT Step 6: Export FORECAST Tables (2 tables, ~21.6M records)
PROMPT =========================================================================
@@06_MARS_826_export_FORECAST_tables.sql

PROMPT
PROMPT =========================================================================
PROMPT Step 7: Export QR_ADJ Tables (3 tables)
PROMPT =========================================================================
@@07_MARS_826_export_QR_ADJ_tables.sql

PROMPT
PROMPT =========================================================================
PROMPT Step 8: Export TTS Tables (2 tables)
PROMPT =========================================================================
@@08_MARS_826_export_TTS_tables.sql

PROMPT
PROMPT =========================================================================
PROMPT Step 9: Verify Exports
PROMPT =========================================================================
@@09_MARS_826_verify_exports.sql

PROMPT
PROMPT =========================================================================
PROMPT Step 10: Verify Record Counts
PROMPT =========================================================================
@@10_MARS_826_verify_record_counts.sql

PROMPT
PROMPT =========================================================================
PROMPT MARS-826 Installation - COMPLETED
PROMPT =========================================================================
PROMPT All 19 CSDB tables have been exported to HIST bucket.
PROMPT Check the log file for complete export details.
PROMPT
PROMPT Next Steps:
PROMPT 1. Review log file for any errors or warnings
PROMPT 2. Verify record counts match source tables
PROMPT 3. Check HIST bucket for Parquet files with Hive partitioning
PROMPT =========================================================================

spool off

quit;
|
||||
@@ -0,0 +1,79 @@
|
||||
-- ============================================================================
-- MARS-826 Rollback Script
-- ============================================================================
-- Purpose: Remove exported historical data from HIST bucket
-- Author: Grzegorz Michalski
-- Schema: OU_LM, OU_MRR (source schemas - read-only)
-- Target: HIST bucket cleanup (delete exported Parquet files)
-- Date: 2025-12-02
-- ============================================================================

-- Create log directory if it doesn't exist.
-- NOTE(review): "2>nul" is Windows (cmd.exe) stderr redirection; on a Unix
-- shell it creates a stray file named "nul" — confirm target client platform.
host mkdir log 2>nul

-- Dynamic spool file generation (using SYS_CONTEXT - no DBA privileges required).
-- Log files are automatically created in the log/ subdirectory.
-- Sized 255 (was 100): CON_NAME alone may be up to 128 bytes, so the full
-- path could overflow a VARCHAR2(100) and raise ORA-06502.
var filename VARCHAR2(255)
BEGIN
    :filename := 'log/ROLLBACK_MARS_826_' || SYS_CONTEXT('USERENV', 'CON_NAME') || '_' || TO_CHAR(SYSDATE,'YYYYMMDD_HH24MISS') || '.log';
END;
/
column filename new_value _filename
select :filename filename from dual;
spool &_filename

SET ECHO OFF
SET TIMING ON
SET SERVEROUTPUT ON SIZE UNLIMITED
SET PAUSE OFF

PROMPT ============================================================================
PROMPT MARS-826 Rollback - Remove Exported Historical Data
PROMPT ============================================================================
PROMPT
PROMPT This rollback will DELETE all exported Parquet files from HIST bucket.
PROMPT
PROMPT Tables affected: 19 tables from OU_LM and OU_MRR schemas
PROMPT Bucket location: history/ARCHIVE/LM/
PROMPT
PROMPT ============================================================================
ACCEPT confirmation CHAR PROMPT 'Do you want to proceed with rollback? (YES/NO): '

WHENEVER SQLERROR EXIT SQL.SQLCODE

-- Validate confirmation.
-- BUGFIX: the original test  UPPER('&confirmation') != 'YES'  evaluates to
-- NULL (not TRUE) when the answer is empty, so the IF body was skipped and
-- simply pressing Enter let this DESTRUCTIVE rollback proceed. Empty input is
-- now rejected explicitly and the answer is trimmed before comparison.
BEGIN
    IF '&confirmation' IS NULL
       OR TRIM('&confirmation') IS NULL
       OR UPPER(TRIM('&confirmation')) != 'YES' THEN
        RAISE_APPLICATION_ERROR(-20001, 'Rollback cancelled by user');
    END IF;
END;
/

PROMPT
PROMPT Starting rollback execution...
PROMPT Timestamp:
SELECT TO_CHAR(SYSDATE, 'YYYY-MM-DD HH24:MI:SS') AS ROLLBACK_START FROM DUAL;
PROMPT
PROMPT ============================================================================

-- Execute rollback scripts. WHENEVER SQLERROR EXIT is still active here, so
-- any failing step aborts the run and leaves the partial state in the log.
@@91_MARS_826_rollback_ADHOC_ADJ_tables.sql
@@92_MARS_826_rollback_BALANCESHEET_tables.sql
@@93_MARS_826_rollback_CSM_ADJ_tables.sql
@@94_MARS_826_rollback_STANDING_FACILITY_tables.sql
-- NOTE(review): the README lists this file as
-- 95_MARS_826_rollback_MRR_IND_CURR_ACC_tables.sql — verify the actual
-- filename on disk before running.
@@95_MARS_826_rollback_MRR_IND_CURRENT_ACCOUNT_tables.sql
@@96_MARS_826_rollback_FORECAST_tables.sql
@@97_MARS_826_rollback_QR_ADJ_tables.sql
@@98_MARS_826_rollback_TTS_tables.sql
@@99_MARS_826_verify_rollback.sql

PROMPT
PROMPT ============================================================================
PROMPT Rollback completed successfully
PROMPT Timestamp:
SELECT TO_CHAR(SYSDATE, 'YYYY-MM-DD HH24:MI:SS') AS ROLLBACK_END FROM DUAL;
PROMPT ============================================================================

spool off

quit;
|
||||
5
MARS_Packages/REL01_POST_DEACTIVATION/MARS-835-PREHOOK-DEPRECATED/.gitignore
vendored
Normal file
5
MARS_Packages/REL01_POST_DEACTIVATION/MARS-835-PREHOOK-DEPRECATED/.gitignore
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
# Exclude temporary folders from version control
|
||||
confluence/
|
||||
log/
|
||||
test/
|
||||
mock_data/
|
||||
@@ -0,0 +1,25 @@
|
||||
--=============================================================================================================================
-- MARS-835-PREHOOK: Install DATA_EXPORTER Package Specification
--=============================================================================================================================
-- Purpose: Deploy refactored DATA_EXPORTER package specification with DRY improvements (v2.2.0)
-- Author: Grzegorz Michalski
-- Date: 2025-12-19
-- Related: MARS-835-PREHOOK - DRY Refactoring for DATA_EXPORTER
--=============================================================================================================================

SET SERVEROUTPUT ON

PROMPT ========================================================================
PROMPT MARS-835-PREHOOK: Installing DATA_EXPORTER Package Specification (v2.2.0)
PROMPT ========================================================================

-- Deploy updated package specification from new_version/
@@new_version/DATA_EXPORTER.pkg

PROMPT SUCCESS: DATA_EXPORTER package specification deployed successfully

--=============================================================================================================================
-- End of Script
--=============================================================================================================================
-- BUGFIX: removed the trailing "/" that followed this footer. In SQL*Plus a
-- bare slash re-executes whatever statement is left in the buffer — here the
-- CREATE OR REPLACE PACKAGE DDL from the include above — compiling the
-- specification a second time and invalidating its dependents again.
|
||||
@@ -0,0 +1,25 @@
|
||||
--=============================================================================================================================
-- MARS-835-PREHOOK: Install DATA_EXPORTER Package Body
--=============================================================================================================================
-- Purpose: Deploy refactored DATA_EXPORTER package body with DRY improvements (v2.2.0)
-- Author: Grzegorz Michalski
-- Date: 2025-12-19
-- Related: MARS-835-PREHOOK - DRY Refactoring for DATA_EXPORTER
--=============================================================================================================================

SET SERVEROUTPUT ON

PROMPT ========================================================================
PROMPT MARS-835-PREHOOK: Installing DATA_EXPORTER Package Body (v2.2.0)
PROMPT ========================================================================

-- Deploy updated package body from new_version/
@@new_version/DATA_EXPORTER.pkb

PROMPT SUCCESS: DATA_EXPORTER package body deployed successfully

--=============================================================================================================================
-- End of Script
--=============================================================================================================================
-- BUGFIX: removed the trailing "/" that followed this footer. In SQL*Plus a
-- bare slash re-executes the statement left in the buffer — here the
-- CREATE OR REPLACE PACKAGE BODY DDL from the include above — compiling the
-- body a second time for no reason.
|
||||
@@ -0,0 +1,25 @@
|
||||
--=============================================================================================================================
-- MARS-835-PREHOOK: Rollback DATA_EXPORTER Package Body
--=============================================================================================================================
-- Purpose: Rollback DATA_EXPORTER package body to v2.1.1 (before parallel export support)
-- Author: Grzegorz Michalski
-- Date: 2025-12-19
-- Related: MARS-835-PREHOOK - Parallel Export for DATA_EXPORTER
--=============================================================================================================================
-- NOTE(review): the matching install scripts describe v2.2.0 as "DRY
-- Refactoring"; the "parallel export" wording above looks stale — confirm
-- which change set v2.2.0 actually carries.

SET SERVEROUTPUT ON

PROMPT ========================================================================
PROMPT MARS-835-PREHOOK: Rolling Back DATA_EXPORTER Package Body to v2.1.1
PROMPT ========================================================================

-- Deploy previous version from current_version/ (backup)
@@current_version/DATA_EXPORTER.pkb

PROMPT SUCCESS: DATA_EXPORTER package body rolled back to v2.1.1

--=============================================================================================================================
-- End of Script
--=============================================================================================================================
-- BUGFIX: removed the trailing "/" that followed this footer. In SQL*Plus a
-- bare slash re-executes the buffered CREATE OR REPLACE PACKAGE BODY DDL
-- from the include above, compiling the rollback body a second time.
|
||||
@@ -0,0 +1,25 @@
|
||||
--=============================================================================================================================
-- MARS-835-PREHOOK: Rollback DATA_EXPORTER Package Specification
--=============================================================================================================================
-- Purpose: Rollback DATA_EXPORTER package specification to v2.1.1 (before parallel export support)
-- Author: Grzegorz Michalski
-- Date: 2025-12-19
-- Related: MARS-835-PREHOOK - Parallel Export for DATA_EXPORTER
--=============================================================================================================================
-- NOTE(review): the matching install scripts describe v2.2.0 as "DRY
-- Refactoring"; the "parallel export" wording above looks stale — confirm
-- which change set v2.2.0 actually carries.

SET SERVEROUTPUT ON

PROMPT ========================================================================
PROMPT MARS-835-PREHOOK: Rolling Back DATA_EXPORTER Package Specification to v2.1.1
PROMPT ========================================================================

-- Deploy previous version from current_version/ (backup)
@@current_version/DATA_EXPORTER.pkg

PROMPT SUCCESS: DATA_EXPORTER package specification rolled back to v2.1.1

--=============================================================================================================================
-- End of Script
--=============================================================================================================================
-- BUGFIX: removed the trailing "/" that followed this footer. In SQL*Plus a
-- bare slash re-executes the buffered CREATE OR REPLACE PACKAGE DDL from the
-- include above, compiling the rolled-back specification a second time and
-- invalidating dependents again.
|
||||
@@ -0,0 +1,273 @@
|
||||
# MARS-835-PREHOOK: DRY Refactoring for DATA_EXPORTER Package
|
||||
|
||||
## Overview
|
||||
Pre-hook package for MARS-835: Code refactoring of DATA_EXPORTER to eliminate code duplication (DRY principle) in the two main export procedures.
|
||||
|
||||
**Version**: 2.2.0 (upgrade from 2.1.1)
|
||||
**Date**: 2025-12-19
|
||||
**Author**: Grzegorz Michalski
|
||||
**Release**: REL01_POST_DEACTIVATION
|
||||
|
||||
## Purpose
|
||||
This package refactors DATA_EXPORTER package by applying DRY (Don't Repeat Yourself) principle to the two main BY_DATE export procedures. The refactoring reduces code duplication, improves maintainability, and prepares the codebase for future enhancements.
|
||||
|
||||
## What's New in v2.2.0
|
||||
|
||||
### Code Refactoring
|
||||
- **EXPORT_TABLE_DATA_BY_DATE**: Refactored to eliminate duplicate code blocks
|
||||
- **EXPORT_TABLE_DATA_TO_CSV_BY_DATE**: Refactored to eliminate duplicate code blocks
|
||||
- **Shared Logic Extraction**: Common code patterns extracted into reusable internal procedures/functions
|
||||
- **Improved Maintainability**: Single point of change for common operations
|
||||
|
||||
### Technical Implementation
|
||||
- Extracted duplicate date partitioning logic into shared procedure
|
||||
- Consolidated bucket URI resolution code
|
||||
- Unified error handling patterns across both procedures
|
||||
- Standardized parameter validation logic
|
||||
- Full integration with ENV_MANAGER logging and error handling
|
||||
|
||||
### Backward Compatibility
|
||||
- ✅ **100% API Compatible**: No changes to procedure signatures
|
||||
- ✅ **No Breaking Changes**: All existing code works without modification
|
||||
- ✅ **Same Behavior**: Functional output identical to v2.1.1
|
||||
|
||||
## Modified Procedures
|
||||
|
||||
### EXPORT_TABLE_DATA_BY_DATE
|
||||
```sql
|
||||
PROCEDURE EXPORT_TABLE_DATA_BY_DATE (
|
||||
pSchemaName IN VARCHAR2,
|
||||
pTableName IN VARCHAR2,
|
||||
pKeyColumnName IN VARCHAR2,
|
||||
pBucketArea IN VARCHAR2,
|
||||
pFolderName IN VARCHAR2,
|
||||
pColumnList IN VARCHAR2 default NULL,
|
||||
pMinDate IN DATE default DATE '1900-01-01',
|
||||
pMaxDate IN DATE default SYSDATE,
|
||||
pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
|
||||
);
|
||||
```
|
||||
**Changes**: Internal code refactoring only - no signature changes.
|
||||
|
||||
### EXPORT_TABLE_DATA_TO_CSV_BY_DATE
|
||||
```sql
|
||||
PROCEDURE EXPORT_TABLE_DATA_TO_CSV_BY_DATE (
|
||||
pSchemaName IN VARCHAR2,
|
||||
pTableName IN VARCHAR2,
|
||||
pKeyColumnName IN VARCHAR2,
|
||||
pBucketArea IN VARCHAR2,
|
||||
pFolderName IN VARCHAR2,
|
||||
pFileName IN VARCHAR2 DEFAULT NULL,
|
||||
pColumnList IN VARCHAR2 default NULL,
|
||||
pMinDate IN DATE default DATE '1900-01-01',
|
||||
pMaxDate IN DATE default SYSDATE,
|
||||
pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
|
||||
);
|
||||
```
|
||||
**Changes**: Internal code refactoring only - no signature changes.
|
||||
|
||||
## Usage Examples
|
||||
|
||||
### Example 1: Parquet Export (No Changes Required)
|
||||
```sql
|
||||
-- Existing code works identically - no modifications needed
|
||||
BEGIN
|
||||
CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
|
||||
pSchemaName => 'OU_TOP',
|
||||
pTableName => 'AGGREGATED_ALLOTMENT',
|
||||
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
||||
pBucketArea => 'ARCHIVE',
|
||||
pFolderName => 'historical_data',
|
||||
pMinDate => DATE '2024-01-01',
|
||||
pMaxDate => SYSDATE
|
||||
);
|
||||
END;
|
||||
/
|
||||
```
|
||||
|
||||
### Example 2: CSV Export (No Changes Required)
|
||||
```sql
|
||||
-- Existing code works identically - no modifications needed
|
||||
BEGIN
|
||||
CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
|
||||
pSchemaName => 'OU_TOP',
|
||||
pTableName => 'TRANSACTIONS',
|
||||
pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
|
||||
pBucketArea => 'DATA',
|
||||
pFolderName => 'csv_exports',
|
||||
pFileName => 'transaction_export.csv',
|
||||
pMinDate => DATE '2024-01-01',
|
||||
pMaxDate => DATE '2024-12-31'
|
||||
);
|
||||
END;
|
||||
/
|
||||
```
|
||||
|
||||
## Refactoring Benefits
|
||||
|
||||
### Code Quality Improvements
|
||||
- **Reduced Code Duplication**: ~30% reduction in duplicate code blocks
|
||||
- **Single Source of Truth**: Common logic centralized in one place
|
||||
- **Easier Maintenance**: Bug fixes and enhancements only need to be made once
|
||||
- **Better Testability**: Extracted functions can be tested independently
|
||||
- **Improved Readability**: Main procedures focus on business logic, not implementation details
|
||||
|
||||
### Future-Proofing
|
||||
- **Foundation for Enhancements**: Clean code structure enables easier addition of new features
|
||||
- **Performance Optimization Ready**: Refactored code easier to optimize and tune
|
||||
- **Extensibility**: New export formats can be added with minimal code duplication
|
||||
|
||||
## Prerequisites
|
||||
- Oracle Database 23ai (Autonomous Database)
|
||||
- ADMIN user access (required for CT_MRDS package deployment)
|
||||
- Access to CT_MRDS schema
|
||||
- DBMS_CLOUD privileges configured
|
||||
- OCI Object Storage credentials (DEF_CRED_ARN or custom)
|
||||
- ENV_MANAGER v3.1.0+ (for version tracking support)
|
||||
|
||||
## Installation
|
||||
|
||||
### Option 1: Master Script (Recommended)
|
||||
```powershell
|
||||
# IMPORTANT: Execute as ADMIN user for proper privilege management
|
||||
Get-Content "MARS_Packages/REL01_POST_DEACTIVATION/MARS-835-PREHOOK/install_mars835.sql" | sql "ADMIN/password@service"
|
||||
|
||||
# Log file created: log/INSTALL_MARS_835_PREHOOK_<PDB>_<timestamp>.log
|
||||
```
|
||||
|
||||
### Option 2: Individual Scripts (Manual)
|
||||
```powershell
|
||||
# IMPORTANT: Execute as ADMIN user
|
||||
Get-Content "01_MARS_835_install_DATA_EXPORTER_SPEC.sql" | sql "ADMIN/password@service"
|
||||
Get-Content "02_MARS_835_install_DATA_EXPORTER_BODY.sql" | sql "ADMIN/password@service"
|
||||
Get-Content "03_MARS_835_verify_installation.sql" | sql "ADMIN/password@service"
|
||||
Get-Content "04_MARS_835_track_version.sql" | sql "ADMIN/password@service"
|
||||
```
|
||||
|
||||
### Installation Steps
|
||||
1. **Deploy Package Specification** - `01_MARS_835_install_DATA_EXPORTER_SPEC.sql` (v2.2.0)
|
||||
2. **Deploy Package Body** - `02_MARS_835_install_DATA_EXPORTER_BODY.sql` with the refactored (DRY) implementation
|
||||
3. **Verify Installation** - `03_MARS_835_verify_installation.sql` checks compilation and version
|
||||
4. **Track Version** - `04_MARS_835_track_version.sql` registers v2.2.0 in version history
|
||||
|
||||
## Verification
|
||||
```sql
|
||||
-- Check package compilation status (ADMIN user - use ALL_OBJECTS)
|
||||
SELECT object_name, object_type, status
|
||||
FROM ALL_OBJECTS
|
||||
WHERE owner = 'CT_MRDS'
|
||||
AND object_name = 'DATA_EXPORTER'
|
||||
AND object_type IN ('PACKAGE', 'PACKAGE BODY');
|
||||
|
||||
-- Expected: Both PACKAGE and PACKAGE BODY with status = VALID
|
||||
|
||||
-- Verify package version
|
||||
SELECT CT_MRDS.DATA_EXPORTER.GET_VERSION() FROM DUAL;
|
||||
-- Expected: 2.2.0
|
||||
|
||||
-- Display build information
|
||||
SELECT CT_MRDS.DATA_EXPORTER.GET_BUILD_INFO() FROM DUAL;
|
||||
-- Expected: DATA_EXPORTER v2.2.0 (2025-12-19 14:00:00) by MRDS Development Team
|
||||
|
||||
-- Check version history
|
||||
SELECT PACKAGE_VERSION, TRACKING_DATE, CHANGE_DETECTED
|
||||
FROM CT_MRDS.A_PACKAGE_VERSION_TRACKING
|
||||
WHERE PACKAGE_OWNER = 'CT_MRDS' AND PACKAGE_NAME = 'DATA_EXPORTER'
|
||||
ORDER BY TRACKING_DATE DESC
|
||||
FETCH FIRST 3 ROWS ONLY;
|
||||
```
|
||||
|
||||
## Rollback
|
||||
```powershell
|
||||
# IMPORTANT: Execute as ADMIN user
|
||||
Get-Content "MARS_Packages/REL01_POST_DEACTIVATION/MARS-835-PREHOOK/rollback_mars835.sql" | sql "ADMIN/password@service"
|
||||
|
||||
# Log file created: log/ROLLBACK_MARS_835_PREHOOK_<PDB>_<timestamp>.log
|
||||
```
|
||||
|
||||
### Rollback Steps (Executed in Reverse Order)
|
||||
1. **Rollback Package Body** - `91_MARS_835_rollback_DATA_EXPORTER_BODY.sql` (restore v2.1.1)
|
||||
2. **Rollback Package Specification** - `92_MARS_835_rollback_DATA_EXPORTER_SPEC.sql` (restore v2.1.1)
|
||||
3. **Track Rollback Version** - `93_MARS_835_track_rollback_version.sql` (register v2.1.1)
|
||||
|
||||
## Package Structure
|
||||
```
|
||||
MARS-835-PREHOOK/
|
||||
├── .gitignore # Git exclusions (log/, test/, etc.)
|
||||
├── install_mars835.sql # Master installation script
|
||||
├── rollback_mars835.sql # Master rollback script
|
||||
├── 01_MARS_835_install_DATA_EXPORTER_SPEC.sql # Package specification deployment
|
||||
├── 02_MARS_835_install_DATA_EXPORTER_BODY.sql # Package body deployment
|
||||
├── 03_MARS_835_verify_installation.sql # Installation verification
|
||||
├── 04_MARS_835_track_version.sql # Version tracking
|
||||
├── 91_MARS_835_rollback_DATA_EXPORTER_BODY.sql # Rollback package body
|
||||
├── 92_MARS_835_rollback_DATA_EXPORTER_SPEC.sql # Rollback package specification
|
||||
├── 93_MARS_835_track_rollback_version.sql # Rollback version tracking
|
||||
├── README.md # This file
|
||||
├── current_version/ # Backup of v2.1.1 (for rollback)
|
||||
│ ├── DATA_EXPORTER.pkg # Previous specification
|
||||
│ └── DATA_EXPORTER.pkb # Previous body
|
||||
├── new_version/ # Updated v2.2.0 (for installation)
|
||||
│ ├── DATA_EXPORTER.pkg # New specification
|
||||
│ └── DATA_EXPORTER.pkb # New body
|
||||
├── test/ # Test scripts and data
|
||||
│ └── test_parallel_export.sql # Parallel export tests
|
||||
└── log/ # SPOOL log files (auto-created)
|
||||
├── INSTALL_MARS_835_PREHOOK_*.log # Installation logs
|
||||
└── ROLLBACK_MARS_835_PREHOOK_*.log # Rollback logs
|
||||
```
|
||||
|
||||
## Testing
|
||||
See [test/test_parallel_export.sql](test/test_parallel_export.sql) for regression tests covering:
- Behavior comparison against the v2.1.1 implementation (refactoring must not change output)
- Parquet and CSV format validation
- Error handling for invalid parameters
- Resource utilization monitoring
|
||||
|
||||
## Database Objects Modified
|
||||
- **CT_MRDS.DATA_EXPORTER** (Package Specification) - Version bump only; no signature changes
- **CT_MRDS.DATA_EXPORTER** (Package Body) - BY_DATE export procedures refactored to remove duplicated logic
|
||||
|
||||
## Dependencies
|
||||
- **CT_MRDS.ENV_MANAGER** - Logging, error handling, version tracking
|
||||
- **CT_MRDS.FILE_MANAGER** - Bucket URI resolution (GET_BUCKET_URI)
|
||||
- **CT_ODS.A_LOAD_HISTORY** - Date-based filtering for exports
|
||||
- **DBMS_CLOUD** - Oracle Cloud export functionality (EXPORT_DATA)
|
||||
|
||||
## Configuration
|
||||
No additional configuration required. This release introduces no new parameters or settings; all existing defaults apply unchanged.
|
||||
|
||||
## Error Handling
|
||||
- **Validation errors**: Raised via ENV_MANAGER error codes (table/column not found, unsupported data type) - unchanged from v2.1.1
|
||||
- **All other errors**: Handled by ENV_MANAGER error framework with full stack traces
|
||||
|
||||
## Logging
|
||||
All operations logged to CT_MRDS.A_PROCESS_LOG via ENV_MANAGER:
|
||||
- **INFO level**: Start/end, parallel degree settings, file counts
|
||||
- **DEBUG level**: Query details, URI construction, execution mode (parallel/sequential)
|
||||
- **ERROR level**: Exceptions with full stack trace and error context
|
||||
|
||||
## Related MARS Issues
|
||||
- **MARS-826-PREHOOK**: DATA_EXPORTER v2.1.1 (column rename A_ETL_LOAD_SET_KEY)
|
||||
- **MARS-846**: DATA_EXPORTER v2.1.0 (partition support)
|
||||
- **MARS-835**: Main deployment package (this is the pre-hook)
|
||||
- **MARS-835-PREHOOK2**: Planned follow-up package
|
||||
|
||||
## Support and Troubleshooting
|
||||
For issues, check:
|
||||
1. **Log files**: `log/INSTALL_MARS_835_PREHOOK_*.log`
|
||||
2. **ALL_ERRORS**: `SELECT * FROM ALL_ERRORS WHERE OWNER = 'CT_MRDS' AND NAME = 'DATA_EXPORTER'`
|
||||
3. **ENV_MANAGER logs**: `SELECT * FROM CT_MRDS.A_PROCESS_LOG ORDER BY LOG_TIMESTAMP DESC`
|
||||
4. **Version tracking**: `SELECT * FROM CT_MRDS.A_PACKAGE_VERSION_TRACKING WHERE PACKAGE_NAME = 'DATA_EXPORTER'`
|
||||
|
||||
## Author
|
||||
Grzegorz Michalski
|
||||
MRDS Development Team
|
||||
2025-12-19
|
||||
|
||||
## Version History
|
||||
- **v2.2.0** (2025-12-19): DRY refactoring of BY_DATE procedures (this release)
|
||||
- **v2.1.1** (2025-12-04): Fixed JOIN column reference A_WORKFLOW_HISTORY_KEY → A_ETL_LOAD_SET_KEY
|
||||
- **v2.1.0** (2025-10-22): Added version tracking and PARTITION_YEAR/PARTITION_MONTH support
|
||||
- **v2.0.0** (2025-10-01): Separated export functionality from FILE_MANAGER package
|
||||
@@ -0,0 +1,733 @@
|
||||
create or replace PACKAGE BODY CT_MRDS.DATA_EXPORTER
|
||||
AS
|
||||
|
||||
-- Internal shared function to build the SELECT column list used by the export
-- procedures.
--
-- Behavior:
--   * pColumnList NULL  -> builds the full column list for
--                          pSchemaName.pTableName from ALL_TAB_COLUMNS
--                          (ordered by COLUMN_ID).
--   * pColumnList given -> parses the comma-separated list (spaces removed,
--                          upper-cased).
--   In both cases every column gets the 'T.' prefix (the table alias used in
--   the callers' generated queries) and the key column is aliased as
--   A_WORKFLOW_HISTORY_KEY for backward compatibility of the exported files.
--
-- Parameters:
--   pColumnList    - optional comma-separated column list (entries may already carry a prefix)
--   pTableName     - table name as stored in the data dictionary (upper case)
--   pSchemaName    - owner as stored in the data dictionary (upper case)
--   pKeyColumnName - key column to alias as A_WORKFLOW_HISTORY_KEY (upper case)
-- Returns: processed column list ready to splice into a SELECT statement.
FUNCTION processColumnList(pColumnList IN VARCHAR2, pTableName IN VARCHAR2, pSchemaName IN VARCHAR2, pKeyColumnName IN VARCHAR2) RETURN VARCHAR2 IS
    vResult     VARCHAR2(32767);
    vColumns    VARCHAR2(32767);
    vPos        PLS_INTEGER;
    vNextPos    PLS_INTEGER;
    vCurrentCol VARCHAR2(128);
BEGIN
    IF pColumnList IS NULL THEN
        -- Build the full column list from the data dictionary, aliasing the
        -- key column in the same pass.
        -- BUGFIX: the previous implementation joined all names first and then
        -- ran REPLACE('T.' || pKeyColumnName, ...), which also corrupted any
        -- other column whose name merely STARTS with the key column name
        -- (e.g. key A_ETL_LOAD_SET_KEY vs column A_ETL_LOAD_SET_KEY_FK).
        -- A per-column CASE rewrites only the exact key column.
        SELECT LISTAGG(
                   CASE
                       WHEN column_name = pKeyColumnName
                       THEN 'T.' || column_name || ' AS A_WORKFLOW_HISTORY_KEY'
                       ELSE 'T.' || column_name
                   END, ', ') WITHIN GROUP (ORDER BY column_id)
          INTO vResult
          FROM all_tab_columns
         WHERE table_name = pTableName
           AND owner      = pSchemaName;

        RETURN vResult;
    END IF;

    -- Explicit list supplied: normalize (remove spaces, upper-case) then parse.
    vColumns := UPPER(REPLACE(pColumnList, ' ', ''));
    vPos     := 1;
    vResult  := '';

    -- Parse the comma-separated column list and add the T. prefix.
    WHILE vPos <= LENGTH(vColumns) LOOP
        vNextPos := INSTR(vColumns, ',', vPos);
        IF vNextPos = 0 THEN
            vNextPos := LENGTH(vColumns) + 1;   -- last element: consume to end of string
        END IF;

        vCurrentCol := SUBSTR(vColumns, vPos, vNextPos - vPos);

        -- The key column (or the legacy literal A_ETL_LOAD_SET_KEY) gets the
        -- backward-compatible alias; everything else just gets the T. prefix.
        IF UPPER(vCurrentCol) = UPPER(pKeyColumnName) THEN
            vCurrentCol := 'T.' || pKeyColumnName || ' AS A_WORKFLOW_HISTORY_KEY';
        ELSIF UPPER(vCurrentCol) = 'A_ETL_LOAD_SET_KEY' THEN
            vCurrentCol := 'T.A_ETL_LOAD_SET_KEY AS A_WORKFLOW_HISTORY_KEY';
        ELSE
            -- Add the T. prefix only if the caller did not already qualify the column.
            IF INSTR(vCurrentCol, '.') = 0 THEN
                vCurrentCol := 'T.' || vCurrentCol;
            END IF;
        END IF;

        -- Append to the result with a comma separator.
        IF vResult IS NOT NULL THEN
            vResult := vResult || ', ';
        END IF;
        vResult := vResult || vCurrentCol;

        vPos := vNextPos + 1;
    END LOOP;

    RETURN vResult;
END processColumnList;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
--------------------------------------------------------------------------------
-- EXPORT_TABLE_DATA
-- Exports table rows to OCI Object Storage as one CSV file per distinct
-- load-set key, found by joining the table's key column to
-- CT_ODS.A_LOAD_HISTORY.A_ETL_LOAD_SET_KEY. The key value becomes the file
-- name (sanitized) with a '.csv' extension.
--
-- Parameters:
--   pSchemaName     - owner of the table to export
--   pTableName      - table to export
--   pKeyColumnName  - column joined to A_LOAD_HISTORY.A_ETL_LOAD_SET_KEY
--   pBucketArea     - logical bucket area, resolved via FILE_MANAGER.GET_BUCKET_URI
--   pFolderName     - optional folder inside the bucket (NULL = bucket root)
--   pCredentialName - DBMS_CLOUD credential (defaults to ENV_MANAGER.gvCredentialName)
--
-- Errors: raised via the ENV_MANAGER error codes (table-not-exists,
-- column-not-exists, unsupported-data-type); anything else is logged with a
-- full error stack and re-raised as CODE_UNKNOWN.
-- NOTE(review): vgMsgTmp used in the handlers is not declared locally --
-- presumably a package-level scratch variable; confirm it exists in the
-- package body declarations (outside this view).
--------------------------------------------------------------------------------
PROCEDURE EXPORT_TABLE_DATA (
    pSchemaName IN VARCHAR2,
    pTableName IN VARCHAR2,
    pKeyColumnName IN VARCHAR2,
    pBucketArea IN VARCHAR2,
    pFolderName IN VARCHAR2,
    pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
)
IS
    -- Type definition for key values
    TYPE key_value_tab IS TABLE OF VARCHAR2(4000);
    vKeyValues           key_value_tab;       -- distinct A_ETL_LOAD_SET_KEY values to export
    vCount               INTEGER;             -- existence-check counter
    vSql                 VARCHAR2(4000);      -- dynamic key-values query
    vKeyValue            VARCHAR2(4000);      -- current key value inside the loop
    vQuery               VARCHAR2(32767);     -- dynamic per-key export query
    vUri                 VARCHAR2(4000);      -- target object-storage URI
    vDataType            VARCHAR2(30);        -- data type of the key column (drives literal quoting)
    vTableName           VARCHAR2(128);       -- upper-cased, later schema-qualified
    vSchemaName          VARCHAR2(128);
    vKeyColumnName       VARCHAR2(128);
    vParameters          VARCHAR2(4000);      -- formatted call parameters, reused in every log entry
    vBucketUri           VARCHAR2(4000);
    vProcessedColumnList VARCHAR2(32767);
    vCurrentCol          VARCHAR2(128);       -- NOTE(review): never assigned in this procedure, so the
                                              -- column-not-exists message suffix below is always empty
    vAllColumnsList      VARCHAR2(32767);


    -- Function to sanitize file names: replaces characters not allowed in
    -- object names with underscores.
    FUNCTION sanitizeFilename(pFilename IN VARCHAR2) RETURN VARCHAR2 IS
        vFilename VARCHAR2(1000);
    BEGIN
        -- Replace any disallowed characters with underscores
        vFilename := REGEXP_REPLACE(pFilename, '[^a-zA-Z0-9._-]', '_');
        RETURN vFilename;
    END sanitizeFilename;

BEGIN
    -- Capture call parameters once for logging.
    vParameters := ENV_MANAGER.FORMAT_PARAMETERS(SYS.ODCIVARCHAR2LIST( 'pSchemaName => '''||nvl(pSchemaName, 'NULL')||''''
                                                                      ,'pTableName => '''||nvl(pTableName, 'NULL')||''''
                                                                      ,'pKeyColumnName => '''||nvl(pKeyColumnName, 'NULL')||''''
                                                                      ,'pBucketArea => '''||nvl(pBucketArea, 'NULL')||''''
                                                                      ,'pFolderName => '''||nvl(pFolderName, 'NULL')||''''
                                                                      ,'pCredentialName => '''||nvl(pCredentialName, 'NULL')||''''
                                                                      ));
    ENV_MANAGER.LOG_PROCESS_EVENT('Start','INFO', vParameters);

    -- Get bucket URI based on bucket area using FILE_MANAGER function
    vBucketUri := FILE_MANAGER.GET_BUCKET_URI(pBucketArea);

    -- Convert table and column names to uppercase to match data dictionary
    vTableName := UPPER(pTableName);
    vSchemaName := UPPER(pSchemaName);
    vKeyColumnName := UPPER(pKeyColumnName);

    -- Check if table exists
    SELECT COUNT(*) INTO vCount
    FROM all_tables
    WHERE table_name = vTableName
    AND owner = vSchemaName;

    IF vCount = 0 THEN
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_TABLE_NOT_EXISTS, ENV_MANAGER.MSG_TABLE_NOT_EXISTS);
    END IF;

    -- Check if key column exists
    SELECT COUNT(*) INTO vCount
    FROM all_tab_columns
    WHERE table_name = vTableName
    AND column_name = vKeyColumnName
    AND owner = vSchemaName;

    IF vCount = 0 THEN
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, ENV_MANAGER.MSG_COLUMN_NOT_EXISTS);

    END IF;

    -- Get the data type of the key column (drives how the literal is quoted
    -- in the per-key filter below).
    SELECT data_type INTO vDataType
    FROM all_tab_columns
    WHERE table_name = vTableName
    AND column_name = vKeyColumnName
    AND owner = vSchemaName;

    -- Build list of all columns for the table (excluding key column to avoid duplication)
    -- NOTE(review): because the key column is excluded here, processColumnList's
    -- A_WORKFLOW_HISTORY_KEY aliasing never fires for this procedure, so the key
    -- value appears only in the file name, not in the CSV payload -- TODO confirm intended.
    SELECT LISTAGG(column_name, ', ') WITHIN GROUP (ORDER BY column_id)
    INTO vAllColumnsList
    FROM all_tab_columns
    WHERE table_name = vTableName
    AND owner = vSchemaName
    AND column_name != vKeyColumnName;

    -- Process column list to add T. prefix to each column
    vProcessedColumnList := processColumnList(vAllColumnsList, vTableName, vSchemaName, vKeyColumnName);

    ENV_MANAGER.LOG_PROCESS_EVENT('Dynamic column list built (excluding key): ' || vAllColumnsList, 'DEBUG', vParameters);
    ENV_MANAGER.LOG_PROCESS_EVENT('Processed column list with T. prefix: ' || vProcessedColumnList, 'DEBUG', vParameters);

    -- Schema-qualify the table; DBMS_ASSERT guards the dynamic SQL against injection.
    vTableName := DBMS_ASSERT.SCHEMA_NAME(vSchemaName) || '.' || DBMS_ASSERT.simple_sql_name(vTableName);
    -- Fetch unique key values from A_LOAD_HISTORY
    vSql := 'SELECT DISTINCT L.A_ETL_LOAD_SET_KEY' ||
            ' FROM ' || vTableName || ' T, CT_ODS.A_LOAD_HISTORY L' ||
            ' WHERE T.' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = L.A_ETL_LOAD_SET_KEY';

    ENV_MANAGER.LOG_PROCESS_EVENT('Executing key values query: ' || vSql, 'DEBUG', vParameters);
    EXECUTE IMMEDIATE vSql BULK COLLECT INTO vKeyValues;
    ENV_MANAGER.LOG_PROCESS_EVENT('Found ' || vKeyValues.COUNT || ' unique key values to process', 'DEBUG', vParameters);

    -- Loop over each unique key value: one CSV file per key.
    FOR i IN 1 .. vKeyValues.COUNT LOOP
        vKeyValue := vKeyValues(i);

        -- Construct the query to extract data for the current key value with A_WORKFLOW_HISTORY_KEY mapping.
        -- The key value is spliced into the literal (CHR(39) = single quote);
        -- assumes A_LOAD_HISTORY key values never contain a quote -- TODO confirm.
        IF vDataType IN ('VARCHAR2', 'CHAR', 'NCHAR', 'NVARCHAR2') THEN
            vQuery := 'SELECT ' || vProcessedColumnList ||
                      ' FROM ' || vTableName || ' T, CT_ODS.A_LOAD_HISTORY L' ||
                      ' WHERE T.' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = L.A_ETL_LOAD_SET_KEY' ||
                      ' AND L.A_ETL_LOAD_SET_KEY = ' || CHR(39) || vKeyValue || CHR(39);
        ELSIF vDataType IN ('NUMBER', 'FLOAT', 'BINARY_FLOAT', 'BINARY_DOUBLE') THEN
            vQuery := 'SELECT ' || vProcessedColumnList ||
                      ' FROM ' || vTableName || ' T, CT_ODS.A_LOAD_HISTORY L' ||
                      ' WHERE T.' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = L.A_ETL_LOAD_SET_KEY' ||
                      ' AND L.A_ETL_LOAD_SET_KEY = ' || vKeyValue;
        ELSIF vDataType LIKE 'TIMESTAMP%' OR vDataType = 'DATE' THEN
            vQuery := 'SELECT ' || vProcessedColumnList ||
                      ' FROM ' || vTableName || ' T, CT_ODS.A_LOAD_HISTORY L' ||
                      ' WHERE T.' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = L.A_ETL_LOAD_SET_KEY' ||
                      ' AND L.A_ETL_LOAD_SET_KEY = TO_TIMESTAMP(' || CHR(39) || vKeyValue || CHR(39) ||', ''YYYY-MM-DD HH24:MI:SS.FF'')';
        ELSE
            RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_UNSUPPORTED_DATA_TYPE, ENV_MANAGER.MSG_UNSUPPORTED_DATA_TYPE);
        END IF;

        -- Construct the URI for the file in OCI Object Storage
        vUri := vBucketUri ||
                CASE WHEN pFolderName IS NOT NULL THEN pFolderName || '/' ELSE '' END ||
                sanitizeFilename(vKeyValue) || '.csv';

        ENV_MANAGER.LOG_PROCESS_EVENT('Processing key value: ' || vKeyValue || ' (' || (i) || '/' || vKeyValues.COUNT || ')', 'DEBUG', vParameters);
        ENV_MANAGER.LOG_PROCESS_EVENT('Export query: ' || vQuery, 'DEBUG', vParameters);
        ENV_MANAGER.LOG_PROCESS_EVENT('Export URI: ' || vUri, 'DEBUG', vParameters);

        -- Use DBMS_CLOUD package to export data to the URI
        DBMS_CLOUD.EXPORT_DATA(
            credential_name => pCredentialName,
            file_uri_list => vUri,
            query => vQuery,
            format => json_object('type' VALUE 'CSV', 'header' VALUE true)
        );
    END LOOP;
    ENV_MANAGER.LOG_PROCESS_EVENT('End','INFO',vParameters);
EXCEPTION
    WHEN ENV_MANAGER.ERR_TABLE_NOT_EXISTS THEN
        vgMsgTmp := ENV_MANAGER.MSG_TABLE_NOT_EXISTS ||': '||vTableName;
        ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_TABLE_NOT_EXISTS, vgMsgTmp);
    WHEN ENV_MANAGER.ERR_COLUMN_NOT_EXISTS THEN
        vgMsgTmp := ENV_MANAGER.MSG_COLUMN_NOT_EXISTS || ' (TableName.ColumnName): ' || vTableName||'.'||vKeyColumnName||CASE WHEN vCurrentCol IS NOT NULL THEN '.'||vCurrentCol||' in column list' ELSE '' END;
        ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, vgMsgTmp);
    WHEN ENV_MANAGER.ERR_UNSUPPORTED_DATA_TYPE THEN
        vgMsgTmp := ENV_MANAGER.MSG_UNSUPPORTED_DATA_TYPE || ' vDataType: '||vDataType;
        ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_UNSUPPORTED_DATA_TYPE, vgMsgTmp);
    WHEN OTHERS THEN
        -- Log complete error details including full stack trace and backtrace
        ENV_MANAGER.LOG_PROCESS_ERROR('Export failed: ' || SQLERRM, vParameters, 'DATA_EXPORTER');
        ENV_MANAGER.LOG_PROCESS_EVENT(ENV_MANAGER.GET_ERROR_STACK(pFormat => 'TABLE', pCode=> SQLCODE), 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_UNKNOWN, ENV_MANAGER.GET_ERROR_STACK(pFormat => 'OUTPUT', pCode=> SQLCODE));

END EXPORT_TABLE_DATA;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
--------------------------------------------------------------------------------
-- EXPORT_TABLE_DATA_BY_DATE
-- Exports table rows to OCI Object Storage as Parquet files partitioned by
-- load year/month: <bucket>/[folder/]PARTITION_YEAR=YYYY/PARTITION_MONTH=MM/YYYYMM.parquet.
-- Rows are selected by joining the table's key column to
-- CT_ODS.A_LOAD_HISTORY.A_ETL_LOAD_SET_KEY and filtering LOAD_START to the
-- half-open range [pMinDate, pMaxDate).
--
-- Parameters:
--   pSchemaName     - owner of the table to export
--   pTableName      - table to export
--   pKeyColumnName  - column joined to A_LOAD_HISTORY.A_ETL_LOAD_SET_KEY
--   pBucketArea     - logical bucket area, resolved via FILE_MANAGER.GET_BUCKET_URI
--   pFolderName     - optional folder inside the bucket (NULL = bucket root)
--   pColumnList     - optional comma-separated column list; NULL = all columns
--   pMinDate        - inclusive lower bound on LOAD_START (default 1900-01-01)
--   pMaxDate        - exclusive upper bound on LOAD_START (default SYSDATE)
--   pCredentialName - DBMS_CLOUD credential (defaults to ENV_MANAGER.gvCredentialName)
--
-- Fixes vs previous revision (no interface change):
--   * The pColumnList validation block re-declared vCurrentCol, shadowing the
--     procedure-level variable read by the ERR_COLUMN_NOT_EXISTS handler; the
--     offending column name therefore never appeared in the error message.
--     The shadowing declaration is removed so the handler can report it.
--   * Removed unused locals (vColumnName in the validation block; vDataType).
--------------------------------------------------------------------------------
PROCEDURE EXPORT_TABLE_DATA_BY_DATE (
    pSchemaName IN VARCHAR2,
    pTableName IN VARCHAR2,
    pKeyColumnName IN VARCHAR2,
    pBucketArea IN VARCHAR2,
    pFolderName IN VARCHAR2,
    pColumnList IN VARCHAR2 default NULL,
    pMinDate IN DATE default DATE '1900-01-01',
    pMaxDate IN DATE default SYSDATE,
    pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
)
IS
    -- Collection type for the year/month pairs fetched by dynamic SQL.
    TYPE key_value_tab IS TABLE OF VARCHAR2(4000);

    vKeyValuesYear       key_value_tab;       -- distinct years (YYYY)
    vKeyValuesMonth      key_value_tab;       -- distinct months (MM), parallel to vKeyValuesYear

    vCount               INTEGER;             -- existence-check counter
    vSql                 VARCHAR2(32000);     -- dynamic year/month query
    vKeyValueYear        VARCHAR2(4000);      -- current year inside the loop
    vKeyValueMonth       VARCHAR2(4000);      -- current month inside the loop
    vQuery               VARCHAR2(32767);     -- dynamic per-partition export query
    vUri                 VARCHAR2(4000);      -- target object-storage URI
    vTableName           VARCHAR2(128);       -- upper-cased, later schema-qualified
    vSchemaName          VARCHAR2(128);
    vKeyColumnName       VARCHAR2(128);
    vParameters          CT_MRDS.A_PROCESS_LOG.PROCEDURE_PARAMETERS%TYPE;  -- formatted call parameters for logging
    vProcessedColumnList VARCHAR2(32767);
    vBucketUri           VARCHAR2(4000);
    vCurrentCol          VARCHAR2(128);       -- last column checked during pColumnList validation; read by the error handler

    -- Replaces characters not allowed in object names with underscores.
    FUNCTION sanitizeFilename(pFilename IN VARCHAR2) RETURN VARCHAR2 IS
        vFilename VARCHAR2(1000);
    BEGIN
        vFilename := REGEXP_REPLACE(pFilename, '[^a-zA-Z0-9._-]', '_');
        RETURN vFilename;
    END sanitizeFilename;

BEGIN
    -- Capture call parameters once for logging.
    vParameters := ENV_MANAGER.FORMAT_PARAMETERS(SYS.ODCIVARCHAR2LIST( 'pSchemaName => '''||nvl(pSchemaName, 'NULL')||''''
                                                                      ,'pTableName => '''||nvl(pTableName, 'NULL')||''''
                                                                      ,'pKeyColumnName => '''||nvl(pKeyColumnName, 'NULL')||''''
                                                                      ,'pBucketArea => '''||nvl(pBucketArea, 'NULL')||''''
                                                                      ,'pFolderName => '''||nvl(pFolderName, 'NULL')||''''
                                                                      ,'pColumnList => '''||nvl(pColumnList, 'NULL')||''''
                                                                      ,'pMinDate => '''||nvl(TO_CHAR(pMinDate, 'YYYY-MM-DD HH24:MI:SS'), 'NULL')||''''
                                                                      ,'pMaxDate => '''||nvl(TO_CHAR(pMaxDate, 'YYYY-MM-DD HH24:MI:SS'), 'NULL')||''''
                                                                      ,'pCredentialName => '''||nvl(pCredentialName, 'NULL')||''''
                                                                      ));
    ENV_MANAGER.LOG_PROCESS_EVENT('Start','INFO', vParameters);

    -- Resolve the physical bucket URI for the logical area.
    vBucketUri := FILE_MANAGER.GET_BUCKET_URI(pBucketArea);

    -- Upper-case names to match the data dictionary.
    vTableName := UPPER(pTableName);
    vSchemaName := UPPER(pSchemaName);
    vKeyColumnName := UPPER(pKeyColumnName);

    -- Fail fast if the table does not exist.
    SELECT COUNT(*) INTO vCount
    FROM all_tables
    WHERE table_name = vTableName
    AND owner = vSchemaName;

    IF vCount = 0 THEN
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_TABLE_NOT_EXISTS, ENV_MANAGER.MSG_TABLE_NOT_EXISTS);
    END IF;

    -- Fail fast if the key column does not exist.
    SELECT COUNT(*) INTO vCount
    FROM all_tab_columns
    WHERE table_name = vTableName
    AND column_name = vKeyColumnName
    AND owner = vSchemaName;

    IF vCount = 0 THEN
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, ENV_MANAGER.MSG_COLUMN_NOT_EXISTS);
    END IF;

    -- Validate pColumnList: every listed column must exist in the table.
    IF pColumnList IS NOT NULL THEN
        DECLARE
            vColumns VARCHAR2(32767);
            vPos     PLS_INTEGER;
            vNextPos PLS_INTEGER;
            -- NOTE: vCurrentCol is intentionally NOT re-declared here; the
            -- procedure-level variable is assigned so the exception handler
            -- can report which column failed validation.
        BEGIN
            -- Normalize: remove spaces, upper-case.
            vColumns := UPPER(REPLACE(pColumnList, ' ', ''));
            vPos := 1;

            -- Parse comma-separated column list.
            WHILE vPos <= LENGTH(vColumns) LOOP
                vNextPos := INSTR(vColumns, ',', vPos);
                IF vNextPos = 0 THEN
                    vNextPos := LENGTH(vColumns) + 1;
                END IF;

                vCurrentCol := SUBSTR(vColumns, vPos, vNextPos - vPos);

                -- Strip a table-alias prefix ('T.COLUMN_NAME' -> 'COLUMN_NAME') before lookup.
                IF INSTR(vCurrentCol, '.') > 0 THEN
                    vCurrentCol := SUBSTR(vCurrentCol, INSTR(vCurrentCol, '.') + 1);
                END IF;

                -- Check if the column exists in the table.
                SELECT COUNT(*) INTO vCount
                FROM all_tab_columns
                WHERE table_name = vTableName
                AND column_name = vCurrentCol
                AND owner = vSchemaName;

                IF vCount = 0 THEN
                    RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, ENV_MANAGER.MSG_COLUMN_NOT_EXISTS);
                END IF;

                vPos := vNextPos + 1;
            END LOOP;

            -- All columns validated: clear so later errors never report a stale name.
            vCurrentCol := NULL;
        END;
    END IF;

    -- Build the SELECT list (adds the T. prefix and the A_WORKFLOW_HISTORY_KEY alias).
    vProcessedColumnList := processColumnList(pColumnList, vTableName, vSchemaName, vKeyColumnName);

    ENV_MANAGER.LOG_PROCESS_EVENT('Input column list: ' || NVL(pColumnList, 'NULL (building dynamic list from table metadata)'), 'DEBUG', vParameters);
    ENV_MANAGER.LOG_PROCESS_EVENT('Processed column list: ' || vProcessedColumnList, 'DEBUG', vParameters);

    -- Schema-qualify the table; DBMS_ASSERT guards the dynamic SQL against injection.
    vTableName := DBMS_ASSERT.SCHEMA_NAME(vSchemaName) || '.' || DBMS_ASSERT.simple_sql_name(vTableName);
    -- Fetch the distinct year/month combinations inside [pMinDate, pMaxDate).
    vSql := 'SELECT DISTINCT TO_CHAR(L.LOAD_START,''YYYY'') AS YR, TO_CHAR(L.LOAD_START,''MM'') AS MN
            FROM ' || vTableName || ' T, CT_ODS.A_LOAD_HISTORY L
            WHERE T.' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = L.A_ETL_LOAD_SET_KEY
            AND L.LOAD_START >= :pMinDate
            AND L.LOAD_START < :pMaxDate
            ' ;
    ENV_MANAGER.LOG_PROCESS_EVENT('Executing date range query: ' || vSql, 'DEBUG', vParameters);
    EXECUTE IMMEDIATE vSql BULK COLLECT INTO vKeyValuesYear, vKeyValuesMonth USING pMinDate, pMaxDate;
    ENV_MANAGER.LOG_PROCESS_EVENT('Found ' || vKeyValuesYear.COUNT || ' year/month combinations to export', 'DEBUG', vParameters);

    -- One Parquet file per year/month combination.
    FOR i IN 1 .. vKeyValuesYear.COUNT LOOP
        vKeyValueYear := vKeyValuesYear(i);
        vKeyValueMonth := vKeyValuesMonth(i);

        ENV_MANAGER.LOG_PROCESS_EVENT('Processing Year/Month: ' || vKeyValueYear || '/' || vKeyValueMonth || ' (' || i || '/' || vKeyValuesYear.COUNT || ')', 'DEBUG', vParameters);
        -- Construct the query to extract data for the current year/month.
        -- Note: processColumnList already handles A_WORKFLOW_HISTORY_KEY aliasing.

        vQuery := 'SELECT ' || vProcessedColumnList || '
                FROM ' || vTableName || ' T, CT_ODS.A_LOAD_HISTORY L
                WHERE T.' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = L.A_ETL_LOAD_SET_KEY
                AND TO_CHAR(L.LOAD_START,''YYYY'') = ' || CHR(39) || vKeyValueYear || CHR(39) || '
                AND TO_CHAR(L.LOAD_START,''MM'') = ' || CHR(39) || vKeyValueMonth || CHR(39) || '
                AND L.LOAD_START >= TO_DATE(' || CHR(39) || TO_CHAR(pMinDate, 'YYYY-MM-DD HH24:MI:SS') || CHR(39) || ', ''YYYY-MM-DD HH24:MI:SS'')
                AND L.LOAD_START < TO_DATE(' || CHR(39) || TO_CHAR(pMaxDate, 'YYYY-MM-DD HH24:MI:SS') || CHR(39) || ', ''YYYY-MM-DD HH24:MI:SS'')';

        -- Construct the Hive-style partitioned URI in OCI Object Storage.
        vUri := vBucketUri ||
                CASE WHEN pFolderName IS NOT NULL THEN pFolderName || '/' ELSE '' END ||
                'PARTITION_YEAR=' || sanitizeFilename(vKeyValueYear) || '/' ||
                'PARTITION_MONTH=' || sanitizeFilename(vKeyValueMonth) || '/' ||
                sanitizeFilename(vKeyValueYear) || sanitizeFilename(vKeyValueMonth) || '.parquet';

        ENV_MANAGER.LOG_PROCESS_EVENT('Export query: ' || vQuery, 'DEBUG', vParameters);
        ENV_MANAGER.LOG_PROCESS_EVENT('Parquet export URI: ' || vUri, 'DEBUG', vParameters);

        -- Export the partition via DBMS_CLOUD.
        DBMS_CLOUD.EXPORT_DATA(
            credential_name => pCredentialName,
            file_uri_list => vUri,
            query => vQuery,
            format => json_object('type' VALUE 'parquet')
        );
    END LOOP;

    ENV_MANAGER.LOG_PROCESS_EVENT('End','INFO',vParameters);
EXCEPTION
    WHEN ENV_MANAGER.ERR_TABLE_NOT_EXISTS THEN
        vgMsgTmp := ENV_MANAGER.MSG_TABLE_NOT_EXISTS ||': '||vTableName;
        ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_TABLE_NOT_EXISTS, vgMsgTmp);
    WHEN ENV_MANAGER.ERR_COLUMN_NOT_EXISTS THEN
        -- vCurrentCol now carries the offending column (was always NULL before the shadowing fix).
        vgMsgTmp := ENV_MANAGER.MSG_COLUMN_NOT_EXISTS || ' (TableName.ColumnName): ' || vTableName||'.'||vKeyColumnName||CASE WHEN vCurrentCol IS NOT NULL THEN '.'||vCurrentCol||' in pColumnList' ELSE '' END;
        ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, vgMsgTmp);
    WHEN OTHERS THEN
        -- Log complete error details including full stack trace and backtrace.
        ENV_MANAGER.LOG_PROCESS_ERROR('Export failed: ' || SQLERRM, vParameters, 'DATA_EXPORTER');
        ENV_MANAGER.LOG_PROCESS_EVENT(ENV_MANAGER.GET_ERROR_STACK(pFormat => 'TABLE', pCode=> SQLCODE), 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_UNKNOWN, ENV_MANAGER.GET_ERROR_STACK(pFormat => 'OUTPUT', pCode=> SQLCODE));

END EXPORT_TABLE_DATA_BY_DATE;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
/**
 * @name EXPORT_TABLE_DATA_TO_CSV_BY_DATE
 * @desc Exports data to CSV files with date filtering.
 *       NOTE: one CSV file is produced PER DISTINCT YEAR/MONTH of
 *       CT_ODS.A_LOAD_HISTORY.LOAD_START within [pMinDate, pMaxDate) --
 *       file name pattern {base}_YYYYMM.csv -- not a single file.
 *       Uses the same date filtering mechanism with CT_ODS.A_LOAD_HISTORY
 *       as EXPORT_TABLE_DATA_BY_DATE, but writes CSV instead of Parquet.
 *       Allows specifying custom column list or uses all table columns if pColumnList is NULL.
 *       Validates that all columns in pColumnList exist in the target table.
 *       Automatically adds 'T.' prefix to column names in pColumnList.
 * @example
 * begin
 *    DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
 *       pSchemaName    => 'CT_MRDS',
 *       pTableName     => 'MY_TABLE',
 *       pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
 *       pBucketArea    => 'DATA',
 *       pFolderName    => 'exports',
 *       pFileName      => 'my_export.csv',
 *       pColumnList    => 'COLUMN1, COLUMN2, COLUMN3', -- Optional
 *       pMinDate       => DATE '2024-01-01',
 *       pMaxDate       => SYSDATE
 *    );
 * end;
 **/
PROCEDURE EXPORT_TABLE_DATA_TO_CSV_BY_DATE (
    pSchemaName     IN VARCHAR2,
    pTableName      IN VARCHAR2,
    pKeyColumnName  IN VARCHAR2,
    pBucketArea     IN VARCHAR2,
    pFolderName     IN VARCHAR2,
    pFileName       IN VARCHAR2 DEFAULT NULL,
    pColumnList     IN VARCHAR2 default NULL,
    pMinDate        IN DATE default DATE '1900-01-01',
    pMaxDate        IN DATE default SYSDATE,
    pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
)
IS
    -- Collection type for the bulk-collected year/month partition values
    TYPE key_value_tab IS TABLE OF VARCHAR2(4000);

    vKeyValuesYear       key_value_tab;
    vKeyValuesMonth      key_value_tab;

    vCount               INTEGER;
    vSql                 VARCHAR2(4000);
    vKeyValueYear        VARCHAR2(4000);
    vKeyValueMonth       VARCHAR2(4000);
    vQuery               VARCHAR2(32767);
    vUri                 VARCHAR2(4000);
    -- 261 = max schema (128) + '.' + max object name (128) + margin; the
    -- original VARCHAR2(128) could overflow once the qualified name is built
    vTableName           VARCHAR2(261);
    vSchemaName          VARCHAR2(128);
    vKeyColumnName       VARCHAR2(128);
    vParameters          CT_MRDS.A_PROCESS_LOG.PROCEDURE_PARAMETERS%TYPE;
    vFileBaseName        VARCHAR2(4000);
    vFileExtension       VARCHAR2(10);
    vProcessedColumnList VARCHAR2(32767);
    vBucketUri           VARCHAR2(4000);
    -- Last column checked during pColumnList validation; read by the
    -- ERR_COLUMN_NOT_EXISTS exception handler to name the offending column.
    vCurrentCol          VARCHAR2(128);

    -- Replaces any character outside [a-zA-Z0-9._-] with '_' so the value is
    -- safe to embed in an Object Storage file name.
    FUNCTION sanitizeFilename(pFilename IN VARCHAR2) RETURN VARCHAR2 IS
    BEGIN
        RETURN REGEXP_REPLACE(pFilename, '[^a-zA-Z0-9._-]', '_');
    END sanitizeFilename;

BEGIN
    -- Capture the call parameters once; reused in every log/error message
    vParameters := ENV_MANAGER.FORMAT_PARAMETERS(SYS.ODCIVARCHAR2LIST( 'pSchemaName => '''||nvl(pSchemaName, 'NULL')||''''
                                                                      ,'pTableName => '''||nvl(pTableName, 'NULL')||''''
                                                                      ,'pKeyColumnName => '''||nvl(pKeyColumnName, 'NULL')||''''
                                                                      ,'pBucketArea => '''||nvl(pBucketArea, 'NULL')||''''
                                                                      ,'pFolderName => '''||nvl(pFolderName, 'NULL')||''''
                                                                      ,'pFileName => '''||nvl(pFileName, 'NULL')||''''
                                                                      ,'pColumnList => '''||nvl(pColumnList, 'NULL')||''''
                                                                      ,'pMinDate => '''||nvl(TO_CHAR(pMinDate, 'YYYY-MM-DD HH24:MI:SS'), 'NULL')||''''
                                                                      ,'pMaxDate => '''||nvl(TO_CHAR(pMaxDate, 'YYYY-MM-DD HH24:MI:SS'), 'NULL')||''''
                                                                      ,'pCredentialName => '''||nvl(pCredentialName, 'NULL')||''''
                                                                      ));
    ENV_MANAGER.LOG_PROCESS_EVENT('Start','INFO', vParameters);

    -- Get bucket URI based on bucket area using FILE_MANAGER function
    vBucketUri := FILE_MANAGER.GET_BUCKET_URI(pBucketArea);

    -- Convert table and column names to uppercase to match data dictionary
    vTableName     := UPPER(pTableName);
    vSchemaName    := UPPER(pSchemaName);
    vKeyColumnName := UPPER(pKeyColumnName);

    -- Extract base filename and extension, or construct default filename.
    -- A '_YYYYMM' suffix is appended per partition inside the loop below.
    IF pFileName IS NOT NULL THEN
        IF INSTR(pFileName, '.') > 0 THEN
            vFileBaseName  := SUBSTR(pFileName, 1, INSTR(pFileName, '.', -1) - 1);
            vFileExtension := SUBSTR(pFileName, INSTR(pFileName, '.', -1));
        ELSE
            vFileBaseName  := pFileName;
            vFileExtension := '.csv';
        END IF;
    ELSE
        -- Default base name is the table name (partition suffix added later)
        vFileBaseName  := UPPER(pTableName);
        vFileExtension := '.csv';
    END IF;

    -- Check if table exists
    SELECT COUNT(*) INTO vCount
    FROM all_tables
    WHERE table_name = vTableName
      AND owner = vSchemaName;

    IF vCount = 0 THEN
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_TABLE_NOT_EXISTS, ENV_MANAGER.MSG_TABLE_NOT_EXISTS);
    END IF;

    -- Check if key column exists
    SELECT COUNT(*) INTO vCount
    FROM all_tab_columns
    WHERE table_name = vTableName
      AND column_name = vKeyColumnName
      AND owner = vSchemaName;

    IF vCount = 0 THEN
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, ENV_MANAGER.MSG_COLUMN_NOT_EXISTS);
    END IF;

    -- Validate pColumnList - check that all column names exist in the table.
    -- FIX: the inner block no longer re-declares vCurrentCol. The previous
    -- shadowing meant the ERR_COLUMN_NOT_EXISTS handler always saw NULL and
    -- could never report which column was missing.
    IF pColumnList IS NOT NULL THEN
        DECLARE
            vColumns VARCHAR2(32767);
            vPos     PLS_INTEGER;
            vNextPos PLS_INTEGER;
        BEGIN
            -- Remove spaces and convert to uppercase for processing
            vColumns := UPPER(REPLACE(pColumnList, ' ', ''));
            vPos := 1;

            -- Parse comma-separated column list
            WHILE vPos <= LENGTH(vColumns) LOOP
                vNextPos := INSTR(vColumns, ',', vPos);
                IF vNextPos = 0 THEN
                    vNextPos := LENGTH(vColumns) + 1;
                END IF;

                vCurrentCol := SUBSTR(vColumns, vPos, vNextPos - vPos);

                -- Remove table alias prefix if present (e.g., 'T.COLUMN_NAME' -> 'COLUMN_NAME')
                IF INSTR(vCurrentCol, '.') > 0 THEN
                    vCurrentCol := SUBSTR(vCurrentCol, INSTR(vCurrentCol, '.') + 1);
                END IF;

                -- Check if column exists in the table
                SELECT COUNT(*) INTO vCount
                FROM all_tab_columns
                WHERE table_name = vTableName
                  AND column_name = vCurrentCol
                  AND owner = vSchemaName;

                IF vCount = 0 THEN
                    RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, ENV_MANAGER.MSG_COLUMN_NOT_EXISTS);
                END IF;

                vPos := vNextPos + 1;
            END LOOP;
        END;
        -- All columns valid: clear so a later unrelated error does not blame
        -- the last column that was checked.
        vCurrentCol := NULL;
    END IF;

    -- Process column list to add T. prefix to each column
    vProcessedColumnList := processColumnList(pColumnList, vTableName, vSchemaName, vKeyColumnName);

    ENV_MANAGER.LOG_PROCESS_EVENT('Input column list: ' || NVL(pColumnList, 'NULL (using dynamic column list)'), 'DEBUG', vParameters);
    ENV_MANAGER.LOG_PROCESS_EVENT('Processed column list: ' || vProcessedColumnList, 'DEBUG', vParameters);

    -- NOTE: the previous revision selected the key column's data_type into a
    -- local variable that was never used; that dead query has been removed.

    -- Build the fully qualified, injection-safe table reference
    vTableName := DBMS_ASSERT.SCHEMA_NAME(vSchemaName) || '.' || DBMS_ASSERT.simple_sql_name(vTableName);

    -- Fetch unique year/month combinations present in the date range
    vSql := 'SELECT DISTINCT TO_CHAR(L.LOAD_START,''YYYY'') AS YR, TO_CHAR(L.LOAD_START,''MM'') AS MN
               FROM ' || vTableName || ' T, CT_ODS.A_LOAD_HISTORY L
              WHERE T.' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = L.A_ETL_LOAD_SET_KEY
                AND L.LOAD_START >= :pMinDate
                AND L.LOAD_START < :pMaxDate
            ' ;
    ENV_MANAGER.LOG_PROCESS_EVENT('Executing date range query: ' || vSql, 'DEBUG', vParameters);
    EXECUTE IMMEDIATE vSql BULK COLLECT INTO vKeyValuesYear, vKeyValuesMonth USING pMinDate, pMaxDate;

    ENV_MANAGER.LOG_PROCESS_EVENT('Found ' || vKeyValuesYear.COUNT || ' year/month combinations to export', 'INFO', vParameters);
    ENV_MANAGER.LOG_PROCESS_EVENT('Date range: ' || TO_CHAR(pMinDate, 'YYYY-MM-DD HH24:MI:SS') || ' to ' || TO_CHAR(pMaxDate, 'YYYY-MM-DD HH24:MI:SS'), 'DEBUG', vParameters);

    -- Loop over each unique year/month combination, one CSV file per pair
    FOR i IN 1 .. vKeyValuesYear.COUNT LOOP
        vKeyValueYear  := vKeyValuesYear(i);
        vKeyValueMonth := vKeyValuesMonth(i);

        -- Construct the query to extract data for the current year/month
        vQuery := 'SELECT ' || vProcessedColumnList || '
                     FROM ' || vTableName || ' T, CT_ODS.A_LOAD_HISTORY L
                    WHERE T.' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = L.A_ETL_LOAD_SET_KEY
                      AND TO_CHAR(L.LOAD_START,''YYYY'') = ' || CHR(39) || vKeyValueYear || CHR(39) || '
                      AND TO_CHAR(L.LOAD_START,''MM'') = ' || CHR(39) || vKeyValueMonth || CHR(39) || '
                      AND L.LOAD_START >= TO_DATE(' || CHR(39) || TO_CHAR(pMinDate, 'YYYY-MM-DD HH24:MI:SS') || CHR(39) || ', ''YYYY-MM-DD HH24:MI:SS'')
                      AND L.LOAD_START < TO_DATE(' || CHR(39) || TO_CHAR(pMaxDate, 'YYYY-MM-DD HH24:MI:SS') || CHR(39) || ', ''YYYY-MM-DD HH24:MI:SS'')';

        -- Construct the URI for the CSV file in OCI Object Storage
        vUri := vBucketUri ||
                CASE WHEN pFolderName IS NOT NULL THEN pFolderName || '/' ELSE '' END ||
                sanitizeFilename(vFileBaseName) || '_' ||
                sanitizeFilename(vKeyValueYear) || sanitizeFilename(vKeyValueMonth) ||
                vFileExtension;

        ENV_MANAGER.LOG_PROCESS_EVENT('Exporting to CSV file: ' || vUri, 'INFO', vParameters);
        ENV_MANAGER.LOG_PROCESS_EVENT('Processing Year/Month: ' || vKeyValueYear || '/' || vKeyValueMonth || ' (' || i || '/' || vKeyValuesYear.COUNT || ')', 'DEBUG', vParameters);
        ENV_MANAGER.LOG_PROCESS_EVENT('Export query: ' || vQuery, 'DEBUG', vParameters);
        ENV_MANAGER.LOG_PROCESS_EVENT('File name pattern: ' || vFileBaseName || '_' || vKeyValueYear || vKeyValueMonth || vFileExtension, 'DEBUG', vParameters);

        -- Use DBMS_CLOUD package to export data to CSV file (with header row)
        DBMS_CLOUD.EXPORT_DATA(
            credential_name => pCredentialName,
            file_uri_list   => vUri,
            query           => vQuery,
            format          => json_object('type' VALUE 'CSV', 'header' VALUE true)
        );
    END LOOP;

    ENV_MANAGER.LOG_PROCESS_EVENT('Export completed successfully for ' || vKeyValuesYear.COUNT || ' files', 'INFO', vParameters);
    ENV_MANAGER.LOG_PROCESS_EVENT('End','INFO',vParameters);

EXCEPTION
    WHEN ENV_MANAGER.ERR_TABLE_NOT_EXISTS THEN
        vgMsgTmp := ENV_MANAGER.MSG_TABLE_NOT_EXISTS ||': '||vTableName;
        ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_TABLE_NOT_EXISTS, vgMsgTmp);
    WHEN ENV_MANAGER.ERR_COLUMN_NOT_EXISTS THEN
        -- vCurrentCol names the failing pColumnList entry (NULL if the key
        -- column check itself failed)
        vgMsgTmp := ENV_MANAGER.MSG_COLUMN_NOT_EXISTS || ' (TableName.ColumnName): ' || vTableName||'.'||vKeyColumnName||CASE WHEN vCurrentCol IS NOT NULL THEN '.'||vCurrentCol||' in pColumnList' ELSE '' END;
        ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, vgMsgTmp);
    WHEN OTHERS THEN
        -- Log complete error details including full stack trace and backtrace
        ENV_MANAGER.LOG_PROCESS_ERROR('Export failed: ' || SQLERRM, vParameters, 'DATA_EXPORTER');
        ENV_MANAGER.LOG_PROCESS_EVENT(ENV_MANAGER.GET_ERROR_STACK(pFormat => 'TABLE', pCode=> SQLCODE), 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_UNKNOWN, ENV_MANAGER.GET_ERROR_STACK(pFormat => 'OUTPUT', pCode=> SQLCODE));

END EXPORT_TABLE_DATA_TO_CSV_BY_DATE;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
-- VERSION MANAGEMENT FUNCTIONS
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
-- Returns the package version constant (format X.Y.Z, e.g. '2.1.1').
FUNCTION GET_VERSION RETURN VARCHAR2 IS
BEGIN
    RETURN PACKAGE_VERSION;
END GET_VERSION;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
-- Returns formatted build information (package name, version, build date,
-- author) by delegating to ENV_MANAGER.GET_PACKAGE_VERSION_INFO.
FUNCTION GET_BUILD_INFO RETURN VARCHAR2 IS
BEGIN
    RETURN ENV_MANAGER.GET_PACKAGE_VERSION_INFO(
        pPackageName => 'DATA_EXPORTER',
        pVersion     => PACKAGE_VERSION,
        pBuildDate   => PACKAGE_BUILD_DATE,
        pAuthor      => PACKAGE_AUTHOR
    );
END GET_BUILD_INFO;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
-- Returns the recent version history (VERSION_HISTORY constant) formatted by
-- ENV_MANAGER.FORMAT_VERSION_HISTORY.
FUNCTION GET_VERSION_HISTORY RETURN VARCHAR2 IS
BEGIN
    RETURN ENV_MANAGER.FORMAT_VERSION_HISTORY(
        pPackageName    => 'DATA_EXPORTER',
        pVersionHistory => VERSION_HISTORY
    );
END GET_VERSION_HISTORY;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
END;
|
||||
|
||||
/
|
||||
@@ -0,0 +1,166 @@
|
||||
create or replace PACKAGE CT_MRDS.DATA_EXPORTER
AUTHID CURRENT_USER
AS
/**
 * Data Export Package: Provides comprehensive data export capabilities to various formats (CSV, Parquet)
 * with support for cloud storage integration via Oracle Cloud Infrastructure (OCI).
 * The structure of comment is used by GET_PACKAGE_DOCUMENTATION function
 * which returns documentation text for confluence page (to Copy-Paste it).
 **/

    -- Package Version Information
    PACKAGE_VERSION CONSTANT VARCHAR2(10) := '2.1.1';
    PACKAGE_BUILD_DATE CONSTANT VARCHAR2(19) := '2025-12-04 13:10:00';
    PACKAGE_AUTHOR CONSTANT VARCHAR2(50) := 'MRDS Development Team';

    -- Version History (last 3-5 changes)
    -- FIX: the previous revision listed v2.1.1 twice; the shorter duplicate
    -- entry has been removed so each version appears exactly once.
    VERSION_HISTORY CONSTANT VARCHAR2(4000) :=
        'v2.1.1 (2025-12-04): Fixed JOIN column reference A_WORKFLOW_HISTORY_KEY -> A_ETL_LOAD_SET_KEY, added consistent column mapping and dynamic column list to EXPORT_TABLE_DATA procedure, enhanced DEBUG logging for all export operations' || CHR(10) ||
        'v2.1.0 (2025-10-22): Added version tracking and PARTITION_YEAR/PARTITION_MONTH support' || CHR(10) ||
        'v2.0.0 (2025-10-01): Separated export functionality from FILE_MANAGER package' || CHR(10) ||
        'v1.0.0 (2025-09-15): Initial implementation within FILE_MANAGER package' || CHR(10);

    cgBL CONSTANT VARCHAR2(2) := CHR(13)||CHR(10);   -- CRLF line break used in formatted output
    vgMsgTmp VARCHAR2(32000);                        -- scratch buffer for building error messages

    ---------------------------------------------------------------------------------------------------------------------------
    ---------------------------------------------------------------------------------------------------------------------------

    /**
     * @name EXPORT_TABLE_DATA
     * @desc Wrapper procedure for DBMS_CLOUD.EXPORT_DATA.
     *       Exports data into CSV file on OCI infrastructure.
     *       pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
     * @example
     * begin
     *    DATA_EXPORTER.EXPORT_TABLE_DATA(
     *       pSchemaName    => 'CT_MRDS',
     *       pTableName     => 'MY_TABLE',
     *       pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
     *       pBucketArea    => 'DATA',
     *       pFolderName    => 'csv_exports'
     *    );
     * end;
     **/
    PROCEDURE EXPORT_TABLE_DATA (
        pSchemaName IN VARCHAR2,
        pTableName IN VARCHAR2,
        pKeyColumnName IN VARCHAR2,
        pBucketArea IN VARCHAR2,
        pFolderName IN VARCHAR2,
        pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
    );


    /**
     * @name EXPORT_TABLE_DATA_BY_DATE
     * @desc Wrapper procedure for DBMS_CLOUD.EXPORT_DATA.
     *       Exports data into PARQUET files on OCI infrastructure.
     *       Each YEAR_MONTH pair goes to separate file (implicit partitioning).
     *       Allows specifying custom column list or uses T.* if pColumnList is NULL.
     *       Validates that all columns in pColumnList exist in the target table.
     *       Automatically adds 'T.' prefix to column names in pColumnList.
     *       pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
     * @example
     * begin
     *    DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
     *       pSchemaName    => 'CT_MRDS',
     *       pTableName     => 'MY_TABLE',
     *       pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
     *       pBucketArea    => 'DATA',
     *       pFolderName    => 'parquet_exports',
     *       pColumnList    => 'COLUMN1, COLUMN2, COLUMN3', -- Optional
     *       pMinDate       => DATE '2024-01-01',
     *       pMaxDate       => SYSDATE
     *    );
     * end;
     **/
    PROCEDURE EXPORT_TABLE_DATA_BY_DATE (
        pSchemaName IN VARCHAR2,
        pTableName IN VARCHAR2,
        pKeyColumnName IN VARCHAR2,
        pBucketArea IN VARCHAR2,
        pFolderName IN VARCHAR2,
        pColumnList IN VARCHAR2 default NULL,
        pMinDate IN DATE default DATE '1900-01-01',
        pMaxDate IN DATE default SYSDATE,
        pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
    );


    /**
     * @name EXPORT_TABLE_DATA_TO_CSV_BY_DATE
     * @desc Exports data to separate CSV files partitioned by year and month.
     *       Creates one CSV file for each year/month combination found in the data.
     *       Uses the same date filtering mechanism with CT_ODS.A_LOAD_HISTORY as EXPORT_TABLE_DATA_BY_DATE,
     *       but exports to CSV format instead of Parquet.
     *       File naming pattern: {pFileName}_YYYYMM.csv or {TABLENAME}_YYYYMM.csv (if pFileName is NULL)
     *       pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
     * @example
     * begin
     *    -- With custom filename
     *    DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
     *       pSchemaName    => 'CT_MRDS',
     *       pTableName     => 'MY_TABLE',
     *       pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
     *       pBucketArea    => 'DATA',
     *       pFolderName    => 'exports',
     *       pFileName      => 'my_export.csv',
     *       pMinDate       => DATE '2024-01-01',
     *       pMaxDate       => SYSDATE
     *    );
     *
     *    -- With auto-generated filename (based on table name only)
     *    DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
     *       pSchemaName    => 'OU_TOP',
     *       pTableName     => 'AGGREGATED_ALLOTMENT',
     *       pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
     *       pBucketArea    => 'ARCHIVE',
     *       pFolderName    => 'exports',
     *       pMinDate       => DATE '2025-09-01',
     *       pMaxDate       => DATE '2025-09-17'
     *    );
     *    -- This will create files like: AGGREGATED_ALLOTMENT_202509.csv, etc.
     * end;
     **/
    PROCEDURE EXPORT_TABLE_DATA_TO_CSV_BY_DATE (
        pSchemaName IN VARCHAR2,
        pTableName IN VARCHAR2,
        pKeyColumnName IN VARCHAR2,
        pBucketArea IN VARCHAR2,
        pFolderName IN VARCHAR2,
        pFileName IN VARCHAR2 DEFAULT NULL,
        pColumnList IN VARCHAR2 default NULL,
        pMinDate IN DATE default DATE '1900-01-01',
        pMaxDate IN DATE default SYSDATE,
        pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
    );

    ---------------------------------------------------------------------------------------------------------------------------
    -- VERSION MANAGEMENT FUNCTIONS
    ---------------------------------------------------------------------------------------------------------------------------

    /**
     * Returns the current package version number
     * return: Version string in format X.Y.Z (e.g., '2.1.0')
     **/
    FUNCTION GET_VERSION RETURN VARCHAR2;

    /**
     * Returns comprehensive build information including version, date, and author
     * return: Formatted string with complete build details
     **/
    FUNCTION GET_BUILD_INFO RETURN VARCHAR2;

    /**
     * Returns the version history with recent changes
     * return: Multi-line string with version history
     **/
    FUNCTION GET_VERSION_HISTORY RETURN VARCHAR2;

END;
|
||||
|
||||
/
|
||||
@@ -0,0 +1,85 @@
|
||||
-- ===================================================================
-- MARS-835-PREHOOK INSTALL SCRIPT: DRY Refactoring for DATA_EXPORTER
-- ===================================================================
-- Purpose: Pre-hook for MARS-835 - DRY refactoring of DATA_EXPORTER BY_DATE procedures
-- Author: Grzegorz Michalski
-- Date: 2025-12-19
-- Version: 2.2.0

-- Dynamic spool file generation (using SYS_CONTEXT - no DBA privileges required)
-- Log files are automatically created in log/ subdirectory
-- IMPORTANT: Ensure log/ directory exists before SPOOL (use host mkdir)
-- NOTE(review): '2>nul' is Windows-only stderr redirection; on a Unix client
-- it is passed to mkdir as a literal argument -- confirm the target platform.
host mkdir log 2>nul

-- Build the per-run log file name in a bind variable:
-- container name (CON_NAME) + timestamp, under log/
var filename VARCHAR2(100)
BEGIN
   :filename := 'log/INSTALL_MARS_835_PREHOOK_' || SYS_CONTEXT('USERENV', 'CON_NAME') || '_' || TO_CHAR(SYSDATE,'YYYYMMDD_HH24MISS') || '.log';
END;
/
-- Copy the bind variable into a substitution variable (_filename) so SPOOL,
-- which cannot read bind variables, can use it
column filename new_value _filename
select :filename filename from dual;
spool &_filename

SET ECHO OFF
SET TIMING ON
SET SERVEROUTPUT ON SIZE UNLIMITED
SET PAUSE OFF

PROMPT =========================================================================
PROMPT MARS-835-PREHOOK: DRY Refactoring for DATA_EXPORTER Package
PROMPT =========================================================================
PROMPT
PROMPT This script will:
PROMPT - Deploy DATA_EXPORTER v2.2.0 with DRY code refactoring
PROMPT - Refactor EXPORT_TABLE_DATA_BY_DATE (eliminate code duplication)
PROMPT - Refactor EXPORT_TABLE_DATA_TO_CSV_BY_DATE (eliminate code duplication)
PROMPT - Track new package version in ENV_MANAGER
PROMPT
PROMPT Expected Duration: 1-2 minutes
PROMPT =========================================================================

-- Confirm installation with user; any answer other than YES aborts the run
-- (WHENEVER SQLERROR EXIT makes the RAISE below terminate the session)
ACCEPT continue CHAR PROMPT 'Type YES to continue with installation, or Ctrl+C to abort: '
WHENEVER SQLERROR EXIT SQL.SQLCODE
BEGIN
  IF '&continue' IS NULL OR TRIM('&continue') IS NULL OR UPPER(TRIM('&continue')) != 'YES' THEN
    RAISE_APPLICATION_ERROR(-20999, 'Installation aborted by user.');
  END IF;
END;
/
WHENEVER SQLERROR CONTINUE

PROMPT
PROMPT =========================================================================
PROMPT Step 1: Deploy DATA_EXPORTER Package Specification (v2.2.0)
PROMPT =========================================================================
@@01_MARS_835_install_DATA_EXPORTER_SPEC.sql

PROMPT
PROMPT =========================================================================
PROMPT Step 2: Deploy DATA_EXPORTER Package Body (v2.2.0)
PROMPT =========================================================================
@@02_MARS_835_install_DATA_EXPORTER_BODY.sql

PROMPT
PROMPT =========================================================================
PROMPT Step 3: Track Package Versions
PROMPT =========================================================================
@@track_package_versions.sql

PROMPT
PROMPT =========================================================================
PROMPT Step 4: Verify Package Versions
PROMPT =========================================================================
@@verify_packages_version.sql

PROMPT
PROMPT =========================================================================
PROMPT MARS-835-PREHOOK Installation - COMPLETED
PROMPT =========================================================================
PROMPT Check the log file for complete installation details.
PROMPT =========================================================================

spool off

quit;
|
||||
@@ -0,0 +1,730 @@
|
||||
create or replace PACKAGE BODY CT_MRDS.DATA_EXPORTER
|
||||
AS
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
-- PRIVATE HELPER FUNCTIONS (USED BY MULTIPLE PROCEDURES)
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
/**
 * Sanitizes a file name for OCI Object Storage: every character outside
 * the allowed set [a-zA-Z0-9._-] is replaced with an underscore.
 **/
FUNCTION sanitizeFilename(pFilename IN VARCHAR2) RETURN VARCHAR2 IS
BEGIN
    RETURN REGEXP_REPLACE(pFilename, '[^a-zA-Z0-9._-]', '_');
END sanitizeFilename;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
-- Internal shared function to process a column list: adds the 'T.' table
-- alias prefix to each column and aliases the key column (or a literal
-- A_ETL_LOAD_SET_KEY entry) as A_WORKFLOW_HISTORY_KEY for consistent output.
-- When pColumnList is NULL, all columns of pSchemaName.pTableName are used
-- in data-dictionary order. pTableName/pSchemaName/pKeyColumnName are
-- expected in UPPERCASE (callers upper-case them before calling).
FUNCTION processColumnList(pColumnList IN VARCHAR2, pTableName IN VARCHAR2, pSchemaName IN VARCHAR2, pKeyColumnName IN VARCHAR2) RETURN VARCHAR2 IS
    vResult     VARCHAR2(32767);
    vColumns    VARCHAR2(32767);
    vPos        PLS_INTEGER;
    vNextPos    PLS_INTEGER;
    vCurrentCol VARCHAR2(128);
BEGIN
    IF pColumnList IS NULL THEN
        -- Build the full prefixed column list directly, aliasing the key
        -- column per-row inside the aggregate.
        -- FIX: the previous implementation built a plain list and then did a
        -- string REPLACE of 'T.<key>', which also rewrote any column whose
        -- name merely STARTS with the key column name (prefix collision,
        -- e.g. A_ETL_LOAD_SET_KEY vs A_ETL_LOAD_SET_KEY_FK). Deciding per
        -- column eliminates that class of corruption.
        SELECT LISTAGG(
                   CASE WHEN column_name = pKeyColumnName
                        THEN 'T.' || column_name || ' AS A_WORKFLOW_HISTORY_KEY'
                        ELSE 'T.' || column_name
                   END, ', ') WITHIN GROUP (ORDER BY column_id)
        INTO vResult
        FROM all_tab_columns
        WHERE table_name = pTableName
          AND owner = pSchemaName;

        RETURN vResult;
    END IF;

    -- Remove extra spaces and convert to uppercase
    vColumns := UPPER(REPLACE(pColumnList, ' ', ''));
    vPos := 1;
    vResult := '';

    -- Parse comma-separated column list and add T. prefix to each entry
    WHILE vPos <= LENGTH(vColumns) LOOP
        vNextPos := INSTR(vColumns, ',', vPos);
        IF vNextPos = 0 THEN
            vNextPos := LENGTH(vColumns) + 1;
        END IF;

        vCurrentCol := SUBSTR(vColumns, vPos, vNextPos - vPos);

        -- Alias the key column (or an explicit A_ETL_LOAD_SET_KEY entry)
        -- as A_WORKFLOW_HISTORY_KEY; otherwise just prefix with 'T.'
        IF UPPER(vCurrentCol) = UPPER(pKeyColumnName) THEN
            vCurrentCol := 'T.' || pKeyColumnName || ' AS A_WORKFLOW_HISTORY_KEY';
        ELSIF UPPER(vCurrentCol) = 'A_ETL_LOAD_SET_KEY' THEN
            vCurrentCol := 'T.A_ETL_LOAD_SET_KEY AS A_WORKFLOW_HISTORY_KEY';
        ELSE
            -- Add T. prefix only if the entry is not already qualified
            IF INSTR(vCurrentCol, '.') = 0 THEN
                vCurrentCol := 'T.' || vCurrentCol;
            END IF;
        END IF;

        -- Append to the result with a comma separator
        IF vResult IS NOT NULL THEN
            vResult := vResult || ', ';
        END IF;
        vResult := vResult || vCurrentCol;

        vPos := vNextPos + 1;
    END LOOP;

    RETURN vResult;
END processColumnList;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
/**
 * Validates table existence, key column existence, and column list.
 * Inputs are matched verbatim against the data dictionary, so
 * pSchemaName/pTableName/pKeyColumnName are expected in UPPERCASE.
 * Raises ENV_MANAGER.CODE_TABLE_NOT_EXISTS if the table is missing and
 * ENV_MANAGER.CODE_COLUMN_NOT_EXISTS if the key column or any entry of
 * pColumnList is missing.
 * NOTE(review): pParameters is accepted but not referenced in this body --
 * presumably reserved for logging; confirm before removing.
 **/
PROCEDURE VALIDATE_TABLE_AND_COLUMNS (
    pSchemaName IN VARCHAR2,
    pTableName IN VARCHAR2,
    pKeyColumnName IN VARCHAR2,
    pColumnList IN VARCHAR2,
    pParameters IN VARCHAR2
) IS
    vCount INTEGER;          -- existence-check result holder
    vColumns VARCHAR2(32767); -- normalized (uppercase, space-stripped) column list
    vPos PLS_INTEGER;        -- current parse position within vColumns
    vNextPos PLS_INTEGER;    -- position of the next comma (or end of string)
    vCurrentCol VARCHAR2(128); -- column currently being validated
BEGIN
    -- Check if table exists
    SELECT COUNT(*) INTO vCount
    FROM all_tables
    WHERE table_name = pTableName
      AND owner = pSchemaName;

    IF vCount = 0 THEN
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_TABLE_NOT_EXISTS, ENV_MANAGER.MSG_TABLE_NOT_EXISTS);
    END IF;

    -- Check if key column exists
    SELECT COUNT(*) INTO vCount
    FROM all_tab_columns
    WHERE table_name = pTableName
      AND column_name = pKeyColumnName
      AND owner = pSchemaName;

    IF vCount = 0 THEN
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, ENV_MANAGER.MSG_COLUMN_NOT_EXISTS);
    END IF;

    -- Validate pColumnList - check if all column names exist in the table
    IF pColumnList IS NOT NULL THEN
        -- Normalize: strip spaces, uppercase to match the data dictionary
        vColumns := UPPER(REPLACE(pColumnList, ' ', ''));
        vPos := 1;

        -- Walk the comma-separated list one entry at a time
        WHILE vPos <= LENGTH(vColumns) LOOP
            vNextPos := INSTR(vColumns, ',', vPos);
            IF vNextPos = 0 THEN
                -- Last entry: pretend there is a comma just past the end
                vNextPos := LENGTH(vColumns) + 1;
            END IF;

            vCurrentCol := SUBSTR(vColumns, vPos, vNextPos - vPos);

            -- Remove table alias prefix if present (e.g. 'T.COL' -> 'COL')
            IF INSTR(vCurrentCol, '.') > 0 THEN
                vCurrentCol := SUBSTR(vCurrentCol, INSTR(vCurrentCol, '.') + 1);
            END IF;

            -- Check if column exists
            SELECT COUNT(*) INTO vCount
            FROM all_tab_columns
            WHERE table_name = pTableName
              AND column_name = vCurrentCol
              AND owner = pSchemaName;

            IF vCount = 0 THEN
                RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, ENV_MANAGER.MSG_COLUMN_NOT_EXISTS);
            END IF;

            vPos := vNextPos + 1;
        END LOOP;
    END IF;
END VALIDATE_TABLE_AND_COLUMNS;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
/**
 * Retrieves the distinct year/month partitions present in the table's data,
 * joining pTableName to CT_ODS.A_LOAD_HISTORY on the key column and
 * restricting LOAD_START to [pMinDate, pMaxDate).
 * Returns a partition_tab ordered by year then month.
 **/
FUNCTION GET_PARTITIONS (
    pSchemaName IN VARCHAR2,
    pTableName IN VARCHAR2,
    pKeyColumnName IN VARCHAR2,
    pMinDate IN DATE,
    pMaxDate IN DATE,
    pParameters IN VARCHAR2
) RETURN partition_tab IS
    vSql VARCHAR2(32000);
    vPartitions partition_tab;
    vKeyValuesYear DBMS_SQL.VARCHAR2_TABLE;
    vKeyValuesMonth DBMS_SQL.VARCHAR2_TABLE;
    vFullTableName VARCHAR2(200);
BEGIN
    -- Build fully qualified table name if not already qualified
    IF INSTR(pTableName, '.') > 0 THEN
        vFullTableName := pTableName; -- Already fully qualified
    ELSE
        vFullTableName := pSchemaName || '.' || pTableName;
    END IF;

    -- FIX: guard the key column identifier with DBMS_ASSERT before splicing
    -- it into dynamic SQL, consistent with the public export procedures
    -- (which already use DBMS_ASSERT.simple_sql_name for this identifier).
    vSql := 'SELECT DISTINCT TO_CHAR(L.LOAD_START,''YYYY'') AS YR, TO_CHAR(L.LOAD_START,''MM'') AS MN
               FROM ' || vFullTableName || ' T, CT_ODS.A_LOAD_HISTORY L
              WHERE T.' || DBMS_ASSERT.SIMPLE_SQL_NAME(pKeyColumnName) || ' = L.A_ETL_LOAD_SET_KEY
                AND L.LOAD_START >= :pMinDate
                AND L.LOAD_START < :pMaxDate
              ORDER BY YR, MN';

    ENV_MANAGER.LOG_PROCESS_EVENT('Executing date range query: ' || vSql, 'DEBUG', pParameters);
    EXECUTE IMMEDIATE vSql BULK COLLECT INTO vKeyValuesYear, vKeyValuesMonth USING pMinDate, pMaxDate;

    ENV_MANAGER.LOG_PROCESS_EVENT('Found ' || vKeyValuesYear.COUNT || ' year/month combinations to export', 'DEBUG', pParameters);

    -- Convert the two parallel collections into the partition_tab records
    vPartitions := partition_tab();
    vPartitions.EXTEND(vKeyValuesYear.COUNT);
    FOR i IN 1 .. vKeyValuesYear.COUNT LOOP
        vPartitions(i).year := vKeyValuesYear(i);
        vPartitions(i).month := vKeyValuesMonth(i);
    END LOOP;

    RETURN vPartitions;
END GET_PARTITIONS;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
/**
 * EXPORT_SINGLE_PARTITION
 * Exports one year/month partition of a table to OCI Object Storage via
 * DBMS_CLOUD.EXPORT_DATA, in PARQUET or CSV format. This is the core worker
 * procedure intended for parallel processing in v2.3.0.
 *
 * PARQUET: Hive-style partition path
 *          <bucket>/<folder>/PARTITION_YEAR=<yyyy>/PARTITION_MONTH=<mm>/<yyyymm>.parquet
 * CSV:     flat file <bucket>/<folder>/<base>_<yyyymm>.csv where <base> is
 *          pFileBaseName, or UPPER(pTableName) when pFileBaseName is NULL
 *
 * Rows are selected from the table joined to CT_ODS.A_LOAD_HISTORY on
 * pKeyColumnName = A_ETL_LOAD_SET_KEY, restricted to the given year/month and
 * to the [pMinDate, pMaxDate) window.
 *
 * NOTE(review): pKeyColumnName and pProcessedColumns are concatenated into the
 * dynamic query without DBMS_ASSERT here; callers are expected to have run
 * VALIDATE_TABLE_AND_COLUMNS first -- confirm for any new caller.
 *
 * Fix: the partition/query DEBUG log lines were previously emitted AFTER the
 * export call returned, so a failing or hanging DBMS_CLOUD.EXPORT_DATA left no
 * record of what was being exported. They are now logged before the export.
 *
 * Raises ORA-20001 for any pFormat other than 'PARQUET' or 'CSV'.
 **/
PROCEDURE EXPORT_SINGLE_PARTITION (
    pSchemaName       IN VARCHAR2,
    pTableName        IN VARCHAR2,
    pKeyColumnName    IN VARCHAR2,
    pYear             IN VARCHAR2,
    pMonth            IN VARCHAR2,
    pBucketUri        IN VARCHAR2,
    pFolderName       IN VARCHAR2,
    pProcessedColumns IN VARCHAR2,
    pMinDate          IN DATE,
    pMaxDate          IN DATE,
    pCredentialName   IN VARCHAR2,
    pFormat           IN VARCHAR2 DEFAULT 'PARQUET',
    pFileBaseName     IN VARCHAR2 DEFAULT NULL,
    pParameters       IN VARCHAR2
) IS
    vQuery         VARCHAR2(32767);   -- dynamic SELECT handed to DBMS_CLOUD
    vUri           VARCHAR2(4000);    -- target object-storage URI
    vFileName      VARCHAR2(1000);    -- CSV file name before sanitizing
    vFullTableName VARCHAR2(200);     -- "SCHEMA.TABLE" used in the FROM clause
BEGIN
    -- Build fully qualified table name if not already qualified
    IF INSTR(pTableName, '.') > 0 THEN
        vFullTableName := pTableName; -- Already fully qualified
    ELSE
        vFullTableName := pSchemaName || '.' || pTableName;
    END IF;

    -- Construct the query for the current year/month. Dates are inlined as
    -- literals (not binds) because DBMS_CLOUD.EXPORT_DATA receives plain text.
    vQuery := 'SELECT ' || pProcessedColumns || '
               FROM ' || vFullTableName || ' T, CT_ODS.A_LOAD_HISTORY L
               WHERE T.' || pKeyColumnName || ' = L.A_ETL_LOAD_SET_KEY
               AND TO_CHAR(L.LOAD_START,''YYYY'') = ' || CHR(39) || pYear || CHR(39) || '
               AND TO_CHAR(L.LOAD_START,''MM'') = ' || CHR(39) || pMonth || CHR(39) || '
               AND L.LOAD_START >= TO_DATE(' || CHR(39) || TO_CHAR(pMinDate, 'YYYY-MM-DD HH24:MI:SS') || CHR(39) || ', ''YYYY-MM-DD HH24:MI:SS'')
               AND L.LOAD_START < TO_DATE(' || CHR(39) || TO_CHAR(pMaxDate, 'YYYY-MM-DD HH24:MI:SS') || CHR(39) || ', ''YYYY-MM-DD HH24:MI:SS'')';

    -- Log the partition and query BEFORE calling DBMS_CLOUD so the context is
    -- available even when the export fails or hangs.
    ENV_MANAGER.LOG_PROCESS_EVENT('Processing Year/Month: ' || pYear || '/' || pMonth || ' (Format: ' || pFormat || ')', 'DEBUG', pParameters);
    ENV_MANAGER.LOG_PROCESS_EVENT('Export query: ' || vQuery, 'DEBUG', pParameters);

    -- Construct the URI based on format and run the export
    IF pFormat = 'PARQUET' THEN
        -- Parquet: Use Hive-style partitioning
        vUri := pBucketUri ||
                CASE WHEN pFolderName IS NOT NULL THEN pFolderName || '/' ELSE '' END ||
                'PARTITION_YEAR=' || sanitizeFilename(pYear) || '/' ||
                'PARTITION_MONTH=' || sanitizeFilename(pMonth) || '/' ||
                sanitizeFilename(pYear) || sanitizeFilename(pMonth) || '.parquet';

        ENV_MANAGER.LOG_PROCESS_EVENT('Parquet export URI: ' || vUri, 'DEBUG', pParameters);

        DBMS_CLOUD.EXPORT_DATA(
            credential_name => pCredentialName,
            file_uri_list   => vUri,
            query           => vQuery,
            format          => json_object('type' VALUE 'parquet')
        );
    ELSIF pFormat = 'CSV' THEN
        -- CSV: Flat file structure with year/month in filename
        vFileName := NVL(pFileBaseName, UPPER(pTableName)) || '_' || pYear || pMonth || '.csv';
        vUri := pBucketUri ||
                CASE WHEN pFolderName IS NOT NULL THEN pFolderName || '/' ELSE '' END ||
                sanitizeFilename(vFileName);

        ENV_MANAGER.LOG_PROCESS_EVENT('CSV export URI: ' || vUri, 'DEBUG', pParameters);

        DBMS_CLOUD.EXPORT_DATA(
            credential_name => pCredentialName,
            file_uri_list   => vUri,
            query           => vQuery,
            format          => json_object('type' VALUE 'CSV', 'header' VALUE true)
        );
    ELSE
        RAISE_APPLICATION_ERROR(-20001, 'Unsupported format: ' || pFormat || '. Use PARQUET or CSV.');
    END IF;
END EXPORT_SINGLE_PARTITION;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
-- MAIN EXPORT PROCEDURES
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
/**
 * EXPORT_TABLE_DATA
 * Exports the table's rows to one CSV file per distinct load-set key.
 * For each distinct L.A_ETL_LOAD_SET_KEY obtained by joining the table to
 * CT_ODS.A_LOAD_HISTORY on pKeyColumnName, the matching rows are written via
 * DBMS_CLOUD.EXPORT_DATA to <bucket>/<pFolderName>/<sanitized key>.csv.
 *
 * @param pSchemaName     Owner of the table (uppercased to match the data dictionary)
 * @param pTableName      Table to export (must exist in ALL_TABLES for pSchemaName)
 * @param pKeyColumnName  Column joined to A_LOAD_HISTORY.A_ETL_LOAD_SET_KEY; its data
 *                        type selects how key literals are rendered in the query
 * @param pBucketArea     Logical bucket area resolved by FILE_MANAGER.GET_BUCKET_URI
 * @param pFolderName     Optional folder inside the bucket (NULL = bucket root)
 * @param pCredentialName OCI credential for DBMS_CLOUD (defaults to ENV_MANAGER.gvCredentialName)
 *
 * Raises ENV_MANAGER.CODE_TABLE_NOT_EXISTS / CODE_COLUMN_NOT_EXISTS /
 * CODE_UNSUPPORTED_DATA_TYPE, and CODE_UNKNOWN for unexpected failures.
 **/
PROCEDURE EXPORT_TABLE_DATA (
    pSchemaName     IN VARCHAR2,
    pTableName      IN VARCHAR2,
    pKeyColumnName  IN VARCHAR2,
    pBucketArea     IN VARCHAR2,
    pFolderName     IN VARCHAR2,
    pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
)
IS
    -- Type definition for key values
    TYPE key_value_tab IS TABLE OF VARCHAR2(4000);
    vKeyValues           key_value_tab;        -- distinct load-set keys found
    vCount               INTEGER;              -- existence-check counter
    vSql                 VARCHAR2(4000);       -- key-discovery query
    vKeyValue            VARCHAR2(4000);       -- current key in the export loop
    vQuery               VARCHAR2(32767);      -- per-key export query
    vUri                 VARCHAR2(4000);       -- per-key target URI
    vDataType            VARCHAR2(30);         -- data type of the key column
    -- NOTE(review): vTableName is later reassigned to "SCHEMA.TABLE"; for long
    -- identifiers the combined string could exceed 128 -- confirm sizing.
    vTableName           VARCHAR2(128);
    vSchemaName          VARCHAR2(128);
    vKeyColumnName       VARCHAR2(128);
    vParameters          VARCHAR2(4000);       -- formatted call parameters for logging
    vBucketUri           VARCHAR2(4000);       -- resolved base bucket URI
    vProcessedColumnList VARCHAR2(32767);      -- column list with T. prefixes
    -- NOTE(review): vCurrentCol is never assigned in this procedure; the
    -- ERR_COLUMN_NOT_EXISTS handler branch that appends it is dead here.
    vCurrentCol          VARCHAR2(128);
    vAllColumnsList      VARCHAR2(32767);      -- raw column list from ALL_TAB_COLUMNS

BEGIN
    -- Capture call parameters once so every log line carries the same context
    vParameters := ENV_MANAGER.FORMAT_PARAMETERS(SYS.ODCIVARCHAR2LIST( 'pSchemaName => '''||nvl(pSchemaName, 'NULL')||''''
                       ,'pTableName => '''||nvl(pTableName, 'NULL')||''''
                       ,'pKeyColumnName => '''||nvl(pKeyColumnName, 'NULL')||''''
                       ,'pBucketArea => '''||nvl(pBucketArea, 'NULL')||''''
                       ,'pFolderName => '''||nvl(pFolderName, 'NULL')||''''
                       ,'pCredentialName => '''||nvl(pCredentialName, 'NULL')||''''
                       ));
    ENV_MANAGER.LOG_PROCESS_EVENT('Start','INFO', vParameters);

    -- Get bucket URI based on bucket area using FILE_MANAGER function
    vBucketUri := FILE_MANAGER.GET_BUCKET_URI(pBucketArea);

    -- Convert table and column names to uppercase to match data dictionary
    vTableName := UPPER(pTableName);
    vSchemaName := UPPER(pSchemaName);
    vKeyColumnName := UPPER(pKeyColumnName);

    -- Check if table exists
    SELECT COUNT(*) INTO vCount
    FROM all_tables
    WHERE table_name = vTableName
    AND owner = vSchemaName;

    IF vCount = 0 THEN
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_TABLE_NOT_EXISTS, ENV_MANAGER.MSG_TABLE_NOT_EXISTS);
    END IF;

    -- Check if key column exists
    SELECT COUNT(*) INTO vCount
    FROM all_tab_columns
    WHERE table_name = vTableName
    AND column_name = vKeyColumnName
    AND owner = vSchemaName;

    IF vCount = 0 THEN
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, ENV_MANAGER.MSG_COLUMN_NOT_EXISTS);

    END IF;

    -- Get the data type of the key column (drives literal rendering below)
    SELECT data_type INTO vDataType
    FROM all_tab_columns
    WHERE table_name = vTableName
    AND column_name = vKeyColumnName
    AND owner = vSchemaName;

    -- Build list of all columns for the table (excluding key column to avoid duplication)
    SELECT LISTAGG(column_name, ', ') WITHIN GROUP (ORDER BY column_id)
    INTO vAllColumnsList
    FROM all_tab_columns
    WHERE table_name = vTableName
    AND owner = vSchemaName
    AND column_name != vKeyColumnName;

    -- Process column list to add T. prefix to each column
    vProcessedColumnList := processColumnList(vAllColumnsList, vTableName, vSchemaName, vKeyColumnName);

    ENV_MANAGER.LOG_PROCESS_EVENT('Dynamic column list built (excluding key): ' || vAllColumnsList, 'DEBUG', vParameters);
    ENV_MANAGER.LOG_PROCESS_EVENT('Processed column list with T. prefix: ' || vProcessedColumnList, 'DEBUG', vParameters);

    -- Injection guard: assert both identifiers, keep the qualified name for FROM clauses
    vTableName := DBMS_ASSERT.SCHEMA_NAME(vSchemaName) || '.' || DBMS_ASSERT.simple_sql_name(vTableName);
    -- Fetch unique key values from A_LOAD_HISTORY
    vSql := 'SELECT DISTINCT L.A_ETL_LOAD_SET_KEY' ||
            ' FROM ' || vTableName || ' T, CT_ODS.A_LOAD_HISTORY L' ||
            ' WHERE T.' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = L.A_ETL_LOAD_SET_KEY';

    ENV_MANAGER.LOG_PROCESS_EVENT('Executing key values query: ' || vSql, 'DEBUG', vParameters);
    EXECUTE IMMEDIATE vSql BULK COLLECT INTO vKeyValues;
    ENV_MANAGER.LOG_PROCESS_EVENT('Found ' || vKeyValues.COUNT || ' unique key values to process', 'DEBUG', vParameters);

    -- Loop over each unique key value, exporting one CSV per key
    FOR i IN 1 .. vKeyValues.COUNT LOOP
        vKeyValue := vKeyValues(i);

        -- Construct the query to extract data for the current key value, rendering
        -- the key literal according to the key column's data type
        IF vDataType IN ('VARCHAR2', 'CHAR', 'NCHAR', 'NVARCHAR2') THEN
            vQuery := 'SELECT ' || vProcessedColumnList ||
                      ' FROM ' || vTableName || ' T, CT_ODS.A_LOAD_HISTORY L' ||
                      ' WHERE T.' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = L.A_ETL_LOAD_SET_KEY' ||
                      ' AND L.A_ETL_LOAD_SET_KEY = ' || CHR(39) || vKeyValue || CHR(39);
        ELSIF vDataType IN ('NUMBER', 'FLOAT', 'BINARY_FLOAT', 'BINARY_DOUBLE') THEN
            vQuery := 'SELECT ' || vProcessedColumnList ||
                      ' FROM ' || vTableName || ' T, CT_ODS.A_LOAD_HISTORY L' ||
                      ' WHERE T.' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = L.A_ETL_LOAD_SET_KEY' ||
                      ' AND L.A_ETL_LOAD_SET_KEY = ' || vKeyValue;
        ELSIF vDataType LIKE 'TIMESTAMP%' OR vDataType = 'DATE' THEN
            vQuery := 'SELECT ' || vProcessedColumnList ||
                      ' FROM ' || vTableName || ' T, CT_ODS.A_LOAD_HISTORY L' ||
                      ' WHERE T.' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = L.A_ETL_LOAD_SET_KEY' ||
                      ' AND L.A_ETL_LOAD_SET_KEY = TO_TIMESTAMP(' || CHR(39) || vKeyValue || CHR(39) ||', ''YYYY-MM-DD HH24:MI:SS.FF'')';
        ELSE
            RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_UNSUPPORTED_DATA_TYPE, ENV_MANAGER.MSG_UNSUPPORTED_DATA_TYPE);
        END IF;

        -- Construct the URI for the file in OCI Object Storage
        vUri := vBucketUri ||
                CASE WHEN pFolderName IS NOT NULL THEN pFolderName || '/' ELSE '' END ||
                sanitizeFilename(vKeyValue) || '.csv';

        ENV_MANAGER.LOG_PROCESS_EVENT('Processing key value: ' || vKeyValue || ' (' || (i) || '/' || vKeyValues.COUNT || ')', 'DEBUG', vParameters);
        ENV_MANAGER.LOG_PROCESS_EVENT('Export query: ' || vQuery, 'DEBUG', vParameters);
        ENV_MANAGER.LOG_PROCESS_EVENT('Export URI: ' || vUri, 'DEBUG', vParameters);

        -- Use DBMS_CLOUD package to export data to the URI
        DBMS_CLOUD.EXPORT_DATA(
            credential_name => pCredentialName,
            file_uri_list   => vUri,
            query           => vQuery,
            format          => json_object('type' VALUE 'CSV', 'header' VALUE true)
        );
    END LOOP;
    ENV_MANAGER.LOG_PROCESS_EVENT('End','INFO',vParameters);
EXCEPTION
    WHEN ENV_MANAGER.ERR_TABLE_NOT_EXISTS THEN
        vgMsgTmp := ENV_MANAGER.MSG_TABLE_NOT_EXISTS ||': '||vTableName;
        ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_TABLE_NOT_EXISTS, vgMsgTmp);
    WHEN ENV_MANAGER.ERR_COLUMN_NOT_EXISTS THEN
        vgMsgTmp := ENV_MANAGER.MSG_COLUMN_NOT_EXISTS || ' (TableName.ColumnName): ' || vTableName||'.'||vKeyColumnName||CASE WHEN vCurrentCol IS NOT NULL THEN '.'||vCurrentCol||' in column list' ELSE '' END;
        ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, vgMsgTmp);
    WHEN ENV_MANAGER.ERR_UNSUPPORTED_DATA_TYPE THEN
        vgMsgTmp := ENV_MANAGER.MSG_UNSUPPORTED_DATA_TYPE || ' vDataType: '||vDataType;
        ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_UNSUPPORTED_DATA_TYPE, vgMsgTmp);
    WHEN OTHERS THEN
        -- Log complete error details including full stack trace and backtrace
        ENV_MANAGER.LOG_PROCESS_ERROR('Export failed: ' || SQLERRM, vParameters, 'DATA_EXPORTER');
        ENV_MANAGER.LOG_PROCESS_EVENT(ENV_MANAGER.GET_ERROR_STACK(pFormat => 'TABLE', pCode=> SQLCODE), 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_UNKNOWN, ENV_MANAGER.GET_ERROR_STACK(pFormat => 'OUTPUT', pCode=> SQLCODE));

END EXPORT_TABLE_DATA;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
/**
 * EXPORT_TABLE_DATA_BY_DATE
 * Exports the table to Parquet files on OCI, one file per year/month of
 * A_LOAD_HISTORY.LOAD_START within [pMinDate, pMaxDate). Partition discovery
 * is delegated to GET_PARTITIONS and the per-partition export to
 * EXPORT_SINGLE_PARTITION (pFormat => 'PARQUET').
 *
 * @param pSchemaName     Owner of the table (uppercased to match the data dictionary)
 * @param pTableName      Table to export
 * @param pKeyColumnName  Column joined to A_LOAD_HISTORY.A_ETL_LOAD_SET_KEY
 * @param pBucketArea     Logical bucket area resolved by FILE_MANAGER.GET_BUCKET_URI
 * @param pFolderName     Optional folder inside the bucket (NULL = bucket root)
 * @param pColumnList     Optional comma-separated columns; validated against the
 *                        table, NULL means processColumnList builds the list
 * @param pMinDate        Inclusive lower bound on LOAD_START
 * @param pMaxDate        Exclusive upper bound on LOAD_START
 * @param pCredentialName OCI credential for DBMS_CLOUD
 *
 * Raises ENV_MANAGER.CODE_TABLE_NOT_EXISTS / CODE_COLUMN_NOT_EXISTS, and
 * CODE_UNKNOWN for unexpected failures.
 **/
PROCEDURE EXPORT_TABLE_DATA_BY_DATE (
    pSchemaName     IN VARCHAR2,
    pTableName      IN VARCHAR2,
    pKeyColumnName  IN VARCHAR2,
    pBucketArea     IN VARCHAR2,
    pFolderName     IN VARCHAR2,
    pColumnList     IN VARCHAR2 default NULL,
    pMinDate        IN DATE default DATE '1900-01-01',
    pMaxDate        IN DATE default SYSDATE,
    pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
)
IS
    -- NOTE(review): vTableName is later reassigned to "SCHEMA.TABLE"; long
    -- identifiers could exceed 128 -- confirm sizing.
    vTableName           VARCHAR2(128);
    vSchemaName          VARCHAR2(128);
    vKeyColumnName       VARCHAR2(128);
    vParameters          CT_MRDS.A_PROCESS_LOG.PROCEDURE_PARAMETERS%TYPE;  -- logging context
    vProcessedColumnList VARCHAR2(32767);   -- column list with T. prefixes
    vBucketUri           VARCHAR2(4000);    -- resolved base bucket URI
    -- NOTE(review): vCurrentCol is never assigned in this procedure; the
    -- ERR_COLUMN_NOT_EXISTS handler branch that appends it is dead here.
    vCurrentCol          VARCHAR2(128);
    vPartitions          partition_tab;     -- year/month partitions to export

BEGIN
    -- Capture call parameters once so every log line carries the same context
    vParameters := ENV_MANAGER.FORMAT_PARAMETERS(SYS.ODCIVARCHAR2LIST( 'pSchemaName => '''||nvl(pSchemaName, 'NULL')||''''
                       ,'pTableName => '''||nvl(pTableName, 'NULL')||''''
                       ,'pKeyColumnName => '''||nvl(pKeyColumnName, 'NULL')||''''
                       ,'pBucketArea => '''||nvl(pBucketArea, 'NULL')||''''
                       ,'pFolderName => '''||nvl(pFolderName, 'NULL')||''''
                       ,'pColumnList => '''||nvl(pColumnList, 'NULL')||''''
                       ,'pMinDate => '''||nvl(TO_CHAR(pMinDate, 'YYYY-MM-DD HH24:MI:SS'), 'NULL')||''''
                       ,'pMaxDate => '''||nvl(TO_CHAR(pMaxDate, 'YYYY-MM-DD HH24:MI:SS'), 'NULL')||''''
                       ,'pCredentialName => '''||nvl(pCredentialName, 'NULL')||''''
                       ));
    ENV_MANAGER.LOG_PROCESS_EVENT('Start','INFO', vParameters);

    -- Get bucket URI based on bucket area using FILE_MANAGER function
    vBucketUri := FILE_MANAGER.GET_BUCKET_URI(pBucketArea);

    -- Convert table and column names to uppercase to match data dictionary
    vTableName := UPPER(pTableName);
    vSchemaName := UPPER(pSchemaName);
    vKeyColumnName := UPPER(pKeyColumnName);

    -- Validate table, key column, and column list using shared procedure
    VALIDATE_TABLE_AND_COLUMNS(vSchemaName, vTableName, vKeyColumnName, pColumnList, vParameters);

    -- Process column list to add T. prefix to each column
    vProcessedColumnList := processColumnList(pColumnList, vTableName, vSchemaName, vKeyColumnName);

    ENV_MANAGER.LOG_PROCESS_EVENT('Input column list: ' || NVL(pColumnList, 'NULL (building dynamic list from table metadata)'), 'DEBUG', vParameters);
    ENV_MANAGER.LOG_PROCESS_EVENT('Processed column list: ' || vProcessedColumnList, 'DEBUG', vParameters);

    -- Injection guard: assert both identifiers, keep the qualified name for FROM clauses
    vTableName := DBMS_ASSERT.SCHEMA_NAME(vSchemaName) || '.' || DBMS_ASSERT.simple_sql_name(vTableName);

    -- Get partitions using shared function
    vPartitions := GET_PARTITIONS(vSchemaName, vTableName, vKeyColumnName, pMinDate, pMaxDate, vParameters);

    -- Loop over each partition and export using shared worker procedure
    FOR i IN 1 .. vPartitions.COUNT LOOP
        EXPORT_SINGLE_PARTITION(
            pSchemaName       => vSchemaName,
            pTableName        => vTableName,
            pKeyColumnName    => vKeyColumnName,
            pYear             => vPartitions(i).year,
            pMonth            => vPartitions(i).month,
            pBucketUri        => vBucketUri,
            pFolderName       => pFolderName,
            pProcessedColumns => vProcessedColumnList,
            pMinDate          => pMinDate,
            pMaxDate          => pMaxDate,
            pCredentialName   => pCredentialName,
            pFormat           => 'PARQUET',
            pFileBaseName     => NULL,
            pParameters       => vParameters
        );
    END LOOP;

    ENV_MANAGER.LOG_PROCESS_EVENT('End','INFO',vParameters);
EXCEPTION
    WHEN ENV_MANAGER.ERR_TABLE_NOT_EXISTS THEN
        vgMsgTmp := ENV_MANAGER.MSG_TABLE_NOT_EXISTS ||': '||vTableName;
        ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_TABLE_NOT_EXISTS, vgMsgTmp);
    WHEN ENV_MANAGER.ERR_COLUMN_NOT_EXISTS THEN
        vgMsgTmp := ENV_MANAGER.MSG_COLUMN_NOT_EXISTS || ' (TableName.ColumnName): ' || vTableName||'.'||vKeyColumnName||CASE WHEN vCurrentCol IS NOT NULL THEN '.'||vCurrentCol||' in pColumnList' ELSE '' END;
        ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, vgMsgTmp);
    WHEN OTHERS THEN
        -- Log complete error details including full stack trace and backtrace
        ENV_MANAGER.LOG_PROCESS_ERROR('Export failed: ' || SQLERRM, vParameters, 'DATA_EXPORTER');
        ENV_MANAGER.LOG_PROCESS_EVENT(ENV_MANAGER.GET_ERROR_STACK(pFormat => 'TABLE', pCode=> SQLCODE), 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_UNKNOWN, ENV_MANAGER.GET_ERROR_STACK(pFormat => 'OUTPUT', pCode=> SQLCODE));

END EXPORT_TABLE_DATA_BY_DATE;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
/**
 * @name EXPORT_TABLE_DATA_TO_CSV_BY_DATE
 * @desc Exports data to CSV files with date filtering, one file per year/month
 *       combination found in CT_ODS.A_LOAD_HISTORY within [pMinDate, pMaxDate).
 *       (The implementation delegates to EXPORT_SINGLE_PARTITION with
 *       pFormat => 'CSV', so a separate file is produced per partition --
 *       it does NOT produce a single combined CSV.)
 *       Uses the same date filtering mechanism with CT_ODS.A_LOAD_HISTORY as
 *       EXPORT_TABLE_DATA_BY_DATE, but exports CSV instead of Parquet.
 *       Allows specifying a custom column list; pColumnList = NULL lets
 *       processColumnList build the list. All columns in pColumnList are
 *       validated against the target table and prefixed with 'T.'.
 *       File naming: {base}_{YYYYMM}.csv where {base} is derived from
 *       pFileName (extension stripped) or defaults to UPPER(pTableName).
 * @example
 * begin
 *   DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
 *       pSchemaName    => 'CT_MRDS',
 *       pTableName     => 'MY_TABLE',
 *       pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
 *       pBucketArea    => 'DATA',
 *       pFolderName    => 'exports',
 *       pFileName      => 'my_export.csv',
 *       pColumnList    => 'COLUMN1, COLUMN2, COLUMN3', -- Optional
 *       pMinDate       => DATE '2024-01-01',
 *       pMaxDate       => SYSDATE
 *   );
 * end;
 **/
PROCEDURE EXPORT_TABLE_DATA_TO_CSV_BY_DATE (
    pSchemaName     IN VARCHAR2,
    pTableName      IN VARCHAR2,
    pKeyColumnName  IN VARCHAR2,
    pBucketArea     IN VARCHAR2,
    pFolderName     IN VARCHAR2,
    pFileName       IN VARCHAR2 DEFAULT NULL,
    pColumnList     IN VARCHAR2 default NULL,
    pMinDate        IN DATE default DATE '1900-01-01',
    pMaxDate        IN DATE default SYSDATE,
    pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
)
IS
    -- NOTE(review): vTableName is later reassigned to "SCHEMA.TABLE"; long
    -- identifiers could exceed 128 -- confirm sizing.
    vTableName           VARCHAR2(128);
    vSchemaName          VARCHAR2(128);
    vKeyColumnName       VARCHAR2(128);
    vParameters          CT_MRDS.A_PROCESS_LOG.PROCEDURE_PARAMETERS%TYPE;  -- logging context
    vFileBaseName        VARCHAR2(4000);    -- filename without extension, passed to the worker
    -- NOTE(review): vFileExtension is computed but never used -- the worker
    -- always appends '.csv', so a custom extension in pFileName is ignored.
    vFileExtension       VARCHAR2(10);
    vProcessedColumnList VARCHAR2(32767);   -- column list with T. prefixes
    vBucketUri           VARCHAR2(4000);    -- resolved base bucket URI
    -- NOTE(review): vCurrentCol is never assigned in this procedure; the
    -- ERR_COLUMN_NOT_EXISTS handler branch that appends it is dead here.
    vCurrentCol          VARCHAR2(128);
    vPartitions          partition_tab;     -- year/month partitions to export

BEGIN
    -- Capture call parameters once so every log line carries the same context
    vParameters := ENV_MANAGER.FORMAT_PARAMETERS(SYS.ODCIVARCHAR2LIST( 'pSchemaName => '''||nvl(pSchemaName, 'NULL')||''''
                       ,'pTableName => '''||nvl(pTableName, 'NULL')||''''
                       ,'pKeyColumnName => '''||nvl(pKeyColumnName, 'NULL')||''''
                       ,'pBucketArea => '''||nvl(pBucketArea, 'NULL')||''''
                       ,'pFolderName => '''||nvl(pFolderName, 'NULL')||''''
                       ,'pFileName => '''||nvl(pFileName, 'NULL')||''''
                       ,'pColumnList => '''||nvl(pColumnList, 'NULL')||''''
                       ,'pMinDate => '''||nvl(TO_CHAR(pMinDate, 'YYYY-MM-DD HH24:MI:SS'), 'NULL')||''''
                       ,'pMaxDate => '''||nvl(TO_CHAR(pMaxDate, 'YYYY-MM-DD HH24:MI:SS'), 'NULL')||''''
                       ,'pCredentialName => '''||nvl(pCredentialName, 'NULL')||''''
                       ));
    ENV_MANAGER.LOG_PROCESS_EVENT('Start','INFO', vParameters);

    -- Get bucket URI based on bucket area using FILE_MANAGER function
    vBucketUri := FILE_MANAGER.GET_BUCKET_URI(pBucketArea);

    -- Convert table and column names to uppercase to match data dictionary
    vTableName := UPPER(pTableName);
    vSchemaName := UPPER(pSchemaName);
    vKeyColumnName := UPPER(pKeyColumnName);

    -- Extract base filename and extension or construct default filename
    IF pFileName IS NOT NULL THEN
        -- Use provided filename; split on the LAST '.' so dotted names keep their stem
        IF INSTR(pFileName, '.') > 0 THEN
            vFileBaseName := SUBSTR(pFileName, 1, INSTR(pFileName, '.', -1) - 1);
            vFileExtension := SUBSTR(pFileName, INSTR(pFileName, '.', -1));
        ELSE
            vFileBaseName := pFileName;
            vFileExtension := '.csv';
        END IF;
    ELSE
        -- Construct default filename: TABLENAME (without extension, will be added by worker)
        vFileBaseName := UPPER(pTableName);
        vFileExtension := '.csv';
    END IF;

    -- Validate table, key column, and column list using shared procedure
    VALIDATE_TABLE_AND_COLUMNS(vSchemaName, vTableName, vKeyColumnName, pColumnList, vParameters);

    -- Process column list to add T. prefix to each column
    vProcessedColumnList := processColumnList(pColumnList, vTableName, vSchemaName, vKeyColumnName);

    ENV_MANAGER.LOG_PROCESS_EVENT('Input column list: ' || NVL(pColumnList, 'NULL (using dynamic column list)'), 'DEBUG', vParameters);
    ENV_MANAGER.LOG_PROCESS_EVENT('Processed column list: ' || vProcessedColumnList, 'DEBUG', vParameters);

    -- Injection guard: assert both identifiers, keep the qualified name for FROM clauses
    vTableName := DBMS_ASSERT.SCHEMA_NAME(vSchemaName) || '.' || DBMS_ASSERT.simple_sql_name(vTableName);

    -- Get partitions using shared function
    vPartitions := GET_PARTITIONS(vSchemaName, vTableName, vKeyColumnName, pMinDate, pMaxDate, vParameters);

    ENV_MANAGER.LOG_PROCESS_EVENT('Found ' || vPartitions.COUNT || ' year/month combinations to export', 'INFO', vParameters);
    ENV_MANAGER.LOG_PROCESS_EVENT('Date range: ' || TO_CHAR(pMinDate, 'YYYY-MM-DD HH24:MI:SS') || ' to ' || TO_CHAR(pMaxDate, 'YYYY-MM-DD HH24:MI:SS'), 'DEBUG', vParameters);

    -- Loop over each partition and export using shared worker procedure
    FOR i IN 1 .. vPartitions.COUNT LOOP
        EXPORT_SINGLE_PARTITION(
            pSchemaName       => vSchemaName,
            pTableName        => vTableName,
            pKeyColumnName    => vKeyColumnName,
            pYear             => vPartitions(i).year,
            pMonth            => vPartitions(i).month,
            pBucketUri        => vBucketUri,
            pFolderName       => pFolderName,
            pProcessedColumns => vProcessedColumnList,
            pMinDate          => pMinDate,
            pMaxDate          => pMaxDate,
            pCredentialName   => pCredentialName,
            pFormat           => 'CSV',
            pFileBaseName     => vFileBaseName,
            pParameters       => vParameters
        );
    END LOOP;

    ENV_MANAGER.LOG_PROCESS_EVENT('Export completed successfully for ' || vPartitions.COUNT || ' files', 'INFO', vParameters);
    ENV_MANAGER.LOG_PROCESS_EVENT('End','INFO',vParameters);

EXCEPTION
    WHEN ENV_MANAGER.ERR_TABLE_NOT_EXISTS THEN
        vgMsgTmp := ENV_MANAGER.MSG_TABLE_NOT_EXISTS ||': '||vTableName;
        ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_TABLE_NOT_EXISTS, vgMsgTmp);
    WHEN ENV_MANAGER.ERR_COLUMN_NOT_EXISTS THEN
        vgMsgTmp := ENV_MANAGER.MSG_COLUMN_NOT_EXISTS || ' (TableName.ColumnName): ' || vTableName||'.'||vKeyColumnName||CASE WHEN vCurrentCol IS NOT NULL THEN '.'||vCurrentCol||' in pColumnList' ELSE '' END;
        ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, vgMsgTmp);
    WHEN OTHERS THEN
        -- Log complete error details including full stack trace and backtrace
        ENV_MANAGER.LOG_PROCESS_ERROR('Export failed: ' || SQLERRM, vParameters, 'DATA_EXPORTER');
        ENV_MANAGER.LOG_PROCESS_EVENT(ENV_MANAGER.GET_ERROR_STACK(pFormat => 'TABLE', pCode=> SQLCODE), 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_UNKNOWN, ENV_MANAGER.GET_ERROR_STACK(pFormat => 'OUTPUT', pCode=> SQLCODE));

END EXPORT_TABLE_DATA_TO_CSV_BY_DATE;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
-- VERSION MANAGEMENT FUNCTIONS
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
/**
 * GET_VERSION
 * Returns the current package version number.
 * @return Version string in X.Y.Z form (the PACKAGE_VERSION constant).
 **/
FUNCTION GET_VERSION RETURN VARCHAR2 IS
    vVersion CONSTANT VARCHAR2(10) := PACKAGE_VERSION;  -- snapshot of the spec constant
BEGIN
    RETURN vVersion;
END GET_VERSION;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
/**
 * GET_BUILD_INFO
 * Returns formatted build information (version, build date, author) for this
 * package, delegating the formatting to ENV_MANAGER.GET_PACKAGE_VERSION_INFO.
 * @return Formatted string with the complete build details.
 **/
FUNCTION GET_BUILD_INFO RETURN VARCHAR2 IS
    vBuildInfo VARCHAR2(32767);  -- formatted result from ENV_MANAGER
BEGIN
    vBuildInfo := ENV_MANAGER.GET_PACKAGE_VERSION_INFO(
                      pPackageName => 'DATA_EXPORTER',
                      pVersion     => PACKAGE_VERSION,
                      pBuildDate   => PACKAGE_BUILD_DATE,
                      pAuthor      => PACKAGE_AUTHOR
                  );
    RETURN vBuildInfo;
END GET_BUILD_INFO;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
/**
 * GET_VERSION_HISTORY
 * Returns the recent version history of this package, formatted by
 * ENV_MANAGER.FORMAT_VERSION_HISTORY from the VERSION_HISTORY constant.
 * @return Multi-line string with the version history.
 **/
FUNCTION GET_VERSION_HISTORY RETURN VARCHAR2 IS
    vHistory VARCHAR2(32767);  -- formatted result from ENV_MANAGER
BEGIN
    vHistory := ENV_MANAGER.FORMAT_VERSION_HISTORY(
                    pPackageName    => 'DATA_EXPORTER',
                    pVersionHistory => VERSION_HISTORY
                );
    RETURN vHistory;
END GET_VERSION_HISTORY;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
END;
|
||||
|
||||
/
|
||||
-- ======================= Next file: DATA_EXPORTER package specification =======================
create or replace PACKAGE CT_MRDS.DATA_EXPORTER
AUTHID CURRENT_USER
AS
/**
 * Data Export Package: Provides comprehensive data export capabilities to various formats (CSV, Parquet)
 * with support for cloud storage integration via Oracle Cloud Infrastructure (OCI).
 * The structure of this comment is used by the GET_PACKAGE_DOCUMENTATION function
 * which returns documentation text for the Confluence page (to copy-paste it).
 **/

    -- Package Version Information
    PACKAGE_VERSION CONSTANT VARCHAR2(10) := '2.2.0';
    PACKAGE_BUILD_DATE CONSTANT VARCHAR2(19) := '2025-12-19 16:00:00';
    PACKAGE_AUTHOR CONSTANT VARCHAR2(50) := 'MRDS Development Team';

    -- Version History (last 3-5 changes)
    VERSION_HISTORY CONSTANT VARCHAR2(4000) :=
        'v2.2.0 (2025-12-19): DRY refactoring - extracted shared helper functions (sanitizeFilename, VALIDATE_TABLE_AND_COLUMNS, GET_PARTITIONS, EXPORT_SINGLE_PARTITION worker procedure). Reduced code duplication by ~400 lines. Prepared architecture for v2.3.0 parallel processing.' || CHR(10) ||
        'v2.1.1 (2025-12-04): Fixed JOIN column reference A_WORKFLOW_HISTORY_KEY -> A_ETL_LOAD_SET_KEY, added consistent column mapping and dynamic column list to EXPORT_TABLE_DATA procedure, enhanced DEBUG logging for all export operations' || CHR(10) ||
        'v2.1.0 (2025-10-22): Added version tracking and PARTITION_YEAR/PARTITION_MONTH support' || CHR(10) ||
        'v2.0.0 (2025-10-01): Separated export functionality from FILE_MANAGER package' || CHR(10) ||
        'v1.0.0 (2025-09-15): Initial implementation within FILE_MANAGER package' || CHR(10);

    -- cgBL: CRLF line break; vgMsgTmp: scratch buffer used by exception handlers in the body
    cgBL CONSTANT VARCHAR2(2) := CHR(13)||CHR(10);
    vgMsgTmp VARCHAR2(32000);

    ---------------------------------------------------------------------------------------------------------------------------
    -- TYPE DEFINITIONS FOR PARTITION HANDLING
    ---------------------------------------------------------------------------------------------------------------------------

    /**
     * Record type for year/month partition information
     **/
    TYPE partition_rec IS RECORD (
        year VARCHAR2(4),
        month VARCHAR2(2)
    );

    /**
     * Table type for collection of partition records
     **/
    TYPE partition_tab IS TABLE OF partition_rec;

    ---------------------------------------------------------------------------------------------------------------------------
    ---------------------------------------------------------------------------------------------------------------------------

    /**
     * @name EXPORT_TABLE_DATA
     * @desc Wrapper procedure for DBMS_CLOUD.EXPORT_DATA.
     *       Exports data into CSV files on OCI infrastructure,
     *       one file per distinct load-set key.
     *       pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
     * @example
     * begin
     *   DATA_EXPORTER.EXPORT_TABLE_DATA(
     *       pSchemaName    => 'CT_MRDS',
     *       pTableName     => 'MY_TABLE',
     *       pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
     *       pBucketArea    => 'DATA',
     *       pFolderName    => 'csv_exports'
     *   );
     * end;
     **/
    PROCEDURE EXPORT_TABLE_DATA (
        pSchemaName     IN VARCHAR2,
        pTableName      IN VARCHAR2,
        pKeyColumnName  IN VARCHAR2,
        pBucketArea     IN VARCHAR2,
        pFolderName     IN VARCHAR2,
        pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
    );

    /**
     * @name EXPORT_TABLE_DATA_BY_DATE
     * @desc Wrapper procedure for DBMS_CLOUD.EXPORT_DATA.
     *       Exports data into PARQUET files on OCI infrastructure.
     *       Each YEAR_MONTH pair goes to a separate file (implicit partitioning).
     *       Allows specifying a custom column list or uses T.* if pColumnList is NULL.
     *       Validates that all columns in pColumnList exist in the target table.
     *       Automatically adds 'T.' prefix to column names in pColumnList.
     *       pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
     * @example
     * begin
     *   DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
     *       pSchemaName    => 'CT_MRDS',
     *       pTableName     => 'MY_TABLE',
     *       pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
     *       pBucketArea    => 'DATA',
     *       pFolderName    => 'parquet_exports',
     *       pColumnList    => 'COLUMN1, COLUMN2, COLUMN3', -- Optional
     *       pMinDate       => DATE '2024-01-01',
     *       pMaxDate       => SYSDATE
     *   );
     * end;
     **/
    PROCEDURE EXPORT_TABLE_DATA_BY_DATE (
        pSchemaName     IN VARCHAR2,
        pTableName      IN VARCHAR2,
        pKeyColumnName  IN VARCHAR2,
        pBucketArea     IN VARCHAR2,
        pFolderName     IN VARCHAR2,
        pColumnList     IN VARCHAR2 default NULL,
        pMinDate        IN DATE default DATE '1900-01-01',
        pMaxDate        IN DATE default SYSDATE,
        pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
    );

    /**
     * @name EXPORT_TABLE_DATA_TO_CSV_BY_DATE
     * @desc Exports data to separate CSV files partitioned by year and month.
     *       Creates one CSV file for each year/month combination found in the data.
     *       Uses the same date filtering mechanism with CT_ODS.A_LOAD_HISTORY as EXPORT_TABLE_DATA_BY_DATE,
     *       but exports to CSV format instead of Parquet.
     *       File naming pattern: {pFileName}_YYYYMM.csv or {TABLENAME}_YYYYMM.csv (if pFileName is NULL)
     *       pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
     * @example
     * begin
     *   -- With custom filename
     *   DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
     *       pSchemaName    => 'CT_MRDS',
     *       pTableName     => 'MY_TABLE',
     *       pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
     *       pBucketArea    => 'DATA',
     *       pFolderName    => 'exports',
     *       pFileName      => 'my_export.csv',
     *       pMinDate       => DATE '2024-01-01',
     *       pMaxDate       => SYSDATE
     *   );
     *
     *   -- With auto-generated filename (based on table name only)
     *   DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
     *       pSchemaName    => 'OU_TOP',
     *       pTableName     => 'AGGREGATED_ALLOTMENT',
     *       pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
     *       pBucketArea    => 'ARCHIVE',
     *       pFolderName    => 'exports',
     *       pMinDate       => DATE '2025-09-01',
     *       pMaxDate       => DATE '2025-09-17'
     *   );
     *   -- This will create files like: AGGREGATED_ALLOTMENT_202509.csv, etc.
     * end;
     **/
    PROCEDURE EXPORT_TABLE_DATA_TO_CSV_BY_DATE (
        pSchemaName     IN VARCHAR2,
        pTableName      IN VARCHAR2,
        pKeyColumnName  IN VARCHAR2,
        pBucketArea     IN VARCHAR2,
        pFolderName     IN VARCHAR2,
        pFileName       IN VARCHAR2 DEFAULT NULL,
        pColumnList     IN VARCHAR2 default NULL,
        pMinDate        IN DATE default DATE '1900-01-01',
        pMaxDate        IN DATE default SYSDATE,
        pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
    );

    ---------------------------------------------------------------------------------------------------------------------------
    -- VERSION MANAGEMENT FUNCTIONS
    ---------------------------------------------------------------------------------------------------------------------------

    /**
     * Returns the current package version number
     * return: Version string in format X.Y.Z (e.g., '2.1.0')
     **/
    FUNCTION GET_VERSION RETURN VARCHAR2;

    /**
     * Returns comprehensive build information including version, date, and author
     * return: Formatted string with complete build details
     **/
    FUNCTION GET_BUILD_INFO RETURN VARCHAR2;

    /**
     * Returns the version history with recent changes
     * return: Multi-line string with version history
     **/
    FUNCTION GET_VERSION_HISTORY RETURN VARCHAR2;

END;

/
|
||||
@@ -0,0 +1,77 @@
|
||||
-- ===================================================================
|
||||
-- MARS-835-PREHOOK ROLLBACK SCRIPT: DRY Refactoring for DATA_EXPORTER (Rollback)
|
||||
-- ===================================================================
|
||||
-- Purpose: Pre-hook rollback for MARS-835 - Rollback DATA_EXPORTER to v2.1.1 (restore pre-refactoring code)
|
||||
-- Author: Grzegorz Michalski
|
||||
-- Date: 2025-12-19
|
||||
-- Version: 2.1.1 (rollback)
|
||||
|
||||
-- Dynamic spool file generation (using SYS_CONTEXT - no DBA privileges required)
|
||||
-- IMPORTANT: Ensure log/ directory exists before SPOOL (use host mkdir)
|
||||
host mkdir log 2>nul
|
||||
|
||||
var filename VARCHAR2(100)
|
||||
BEGIN
|
||||
:filename := 'log/ROLLBACK_MARS_835_PREHOOK_' || SYS_CONTEXT('USERENV', 'CON_NAME') || '_' || TO_CHAR(SYSDATE,'YYYYMMDD_HH24MISS') || '.log';
|
||||
END;
|
||||
/
|
||||
column filename new_value _filename
|
||||
select :filename filename from dual;
|
||||
spool &_filename
|
||||
|
||||
SET ECHO OFF
|
||||
SET TIMING ON
|
||||
SET SERVEROUTPUT ON SIZE UNLIMITED
|
||||
SET PAUSE OFF
|
||||
|
||||
PROMPT =========================================================================
|
||||
PROMPT MARS-835-PREHOOK: Rollback Package
|
||||
PROMPT =========================================================================
|
||||
PROMPT WARNING: This will reverse all changes from MARS-835-PREHOOK installation!
|
||||
PROMPT WARNING: DRY refactoring will be reverted to v2.1.1!
|
||||
PROMPT =========================================================================
|
||||
|
||||
-- Confirm rollback with user
ACCEPT continue CHAR PROMPT 'Type YES to continue with rollback, or Ctrl+C to abort: '
WHENEVER SQLERROR EXIT SQL.SQLCODE
BEGIN
    -- Abort unless the operator typed YES (any case, surrounding blanks ignored).
    -- In Oracle an empty ACCEPT answer substitutes to '' which is NULL, so the
    -- single NVL covers the three separate NULL/TRIM checks the original had:
    -- NULL input, blank input, and anything other than YES all abort.
    IF NVL(UPPER(TRIM('&continue')), 'NO') <> 'YES' THEN
        RAISE_APPLICATION_ERROR(-20999, 'Rollback aborted by user.');
    END IF;
END;
/
|
||||
WHENEVER SQLERROR CONTINUE
|
||||
|
||||
-- Execute rollback scripts in REVERSE order (body first, then spec)
|
||||
PROMPT
|
||||
PROMPT =========================================================================
|
||||
PROMPT Step 1: Rollback DATA_EXPORTER Package Body to v2.1.1
|
||||
PROMPT =========================================================================
|
||||
@@91_MARS_835_rollback_DATA_EXPORTER_BODY.sql
|
||||
|
||||
PROMPT
|
||||
PROMPT =========================================================================
|
||||
PROMPT Step 2: Rollback DATA_EXPORTER Package Specification to v2.1.1
|
||||
PROMPT =========================================================================
|
||||
@@92_MARS_835_rollback_DATA_EXPORTER_SPEC.sql
|
||||
|
||||
PROMPT
|
||||
PROMPT =========================================================================
|
||||
PROMPT Step 3: Track Rollback Version
|
||||
PROMPT =========================================================================
|
||||
@@track_package_versions.sql
|
||||
|
||||
PROMPT
|
||||
PROMPT =========================================================================
|
||||
PROMPT Step 4: Verify Package Versions
|
||||
PROMPT =========================================================================
|
||||
@@verify_packages_version.sql
|
||||
|
||||
PROMPT
|
||||
PROMPT =========================================================================
|
||||
PROMPT MARS-835-PREHOOK Rollback - COMPLETED
|
||||
PROMPT =========================================================================
|
||||
|
||||
spool off
|
||||
|
||||
quit;
|
||||
@@ -0,0 +1,96 @@
|
||||
-- ===================================================================
|
||||
-- Simple Package Version Tracking Script
|
||||
-- ===================================================================
|
||||
-- Purpose: Track specified Oracle package versions
|
||||
-- Author: Grzegorz Michalski
|
||||
-- Date: 2025-12-04
|
||||
-- Version: 3.1.0 - List-Based Edition
|
||||
--
|
||||
-- USAGE:
|
||||
-- 1. Edit package list below (add/remove packages as needed)
|
||||
-- 2. Include in your install/rollback script: @@track_package_versions.sql
|
||||
-- ===================================================================
|
||||
|
||||
SET SERVEROUTPUT ON;

DECLARE
    -- One tracked package: owner, name, and the version its GET_VERSION() reports.
    TYPE t_package_rec IS RECORD (
        owner        VARCHAR2(50),
        package_name VARCHAR2(50),
        version      VARCHAR2(50)
    );
    TYPE t_packages IS TABLE OF t_package_rec;
    TYPE t_string_array IS TABLE OF VARCHAR2(100);

    -- ===================================================================
    -- PACKAGE LIST - Edit this array to specify packages to track
    -- ===================================================================
    -- Add or remove entries as needed for your MARS issue
    -- Format: 'SCHEMA.PACKAGE_NAME'
    -- ===================================================================
    vPackageList t_string_array := t_string_array(
        'CT_MRDS.DATA_EXPORTER'
    );
    -- ===================================================================

    vPackages    t_packages := t_packages();
    vVersion     VARCHAR2(50);
    vCount       NUMBER := 0;          -- packages successfully tracked in ENV_MANAGER
    vOwner       VARCHAR2(50);
    vPackageName VARCHAR2(50);
    vDotPos      NUMBER;
BEGIN
    DBMS_OUTPUT.PUT_LINE('========================================');
    DBMS_OUTPUT.PUT_LINE('Package Version Tracking');
    DBMS_OUTPUT.PUT_LINE('========================================');

    -- Process each package in the list
    FOR i IN 1..vPackageList.COUNT LOOP
        vDotPos := INSTR(vPackageList(i), '.');
        IF vDotPos = 0 THEN
            -- Previously entries without a dot were skipped silently;
            -- report them so list typos are visible in the install log.
            DBMS_OUTPUT.PUT_LINE('WARNING: skipping malformed entry "'
                                 || vPackageList(i) || '" (expected SCHEMA.PACKAGE_NAME)');
            CONTINUE;
        END IF;

        vOwner       := SUBSTR(vPackageList(i), 1, vDotPos - 1);
        vPackageName := SUBSTR(vPackageList(i), vDotPos + 1);

        -- Get package version
        BEGIN
            -- The list above is hardcoded, but validate the identifier anyway
            -- before concatenating it into dynamic SQL (DBMS_ASSERT raises
            -- ORA-44004 on anything that is not a qualified SQL name).
            EXECUTE IMMEDIATE 'SELECT '
                || SYS.DBMS_ASSERT.QUALIFIED_SQL_NAME(vOwner || '.' || vPackageName)
                || '.GET_VERSION() FROM DUAL'
                INTO vVersion;

            vPackages.EXTEND;
            vPackages(vPackages.COUNT).owner        := vOwner;
            vPackages(vPackages.COUNT).package_name := vPackageName;
            vPackages(vPackages.COUNT).version      := vVersion;

            -- Track in ENV_MANAGER
            BEGIN
                CT_MRDS.ENV_MANAGER.TRACK_PACKAGE_VERSION(
                    pPackageOwner     => vOwner,
                    pPackageName      => vPackageName,
                    pPackageVersion   => vVersion,
                    pPackageBuildDate => TO_CHAR(SYSDATE, 'YYYY-MM-DD HH24:MI:SS'),
                    pPackageAuthor    => 'Grzegorz Michalski'
                );
                vCount := vCount + 1;
            EXCEPTION
                WHEN OTHERS THEN
                    -- Continue even if tracking fails, but say why instead of
                    -- swallowing the error silently as the original did.
                    DBMS_OUTPUT.PUT_LINE('WARNING: tracking failed for '
                                         || vOwner || '.' || vPackageName || ': ' || SQLERRM);
            END;
        EXCEPTION
            WHEN OTHERS THEN
                -- Package invalid, GET_VERSION missing, or no privilege:
                -- skip it but leave a trace in the log.
                DBMS_OUTPUT.PUT_LINE('WARNING: skipped '
                                     || vOwner || '.' || vPackageName || ': ' || SQLERRM);
        END;
    END LOOP;

    DBMS_OUTPUT.PUT_LINE('');
    DBMS_OUTPUT.PUT_LINE('Summary:');
    DBMS_OUTPUT.PUT_LINE('--------');
    DBMS_OUTPUT.PUT_LINE('Packages tracked: ' || vCount || '/' || vPackageList.COUNT);

    IF vPackages.COUNT > 0 THEN
        DBMS_OUTPUT.PUT_LINE('');
        DBMS_OUTPUT.PUT_LINE('Tracked Packages:');
        FOR i IN 1..vPackages.COUNT LOOP
            DBMS_OUTPUT.PUT_LINE('  ' || vPackages(i).owner || '.'
                                 || vPackages(i).package_name || ' v' || vPackages(i).version);
        END LOOP;
    END IF;

    DBMS_OUTPUT.PUT_LINE('========================================');
END;
/
|
||||
@@ -0,0 +1,62 @@
|
||||
-- ===================================================================
|
||||
-- Universal Package Version Verification Script
|
||||
-- ===================================================================
|
||||
-- Purpose: Verify all tracked Oracle packages for code changes
|
||||
-- Author: Grzegorz Michalski
|
||||
-- Date: 2025-12-04
|
||||
-- Version: 1.0.0
|
||||
--
|
||||
-- USAGE:
|
||||
-- Include at the end of install/rollback scripts: @@verify_packages_version.sql
|
||||
--
|
||||
-- OUTPUT:
|
||||
-- - List of all tracked packages with their current status
|
||||
-- - OK: Package has not changed since last tracking
|
||||
-- - WARNING: Package code changed without version update
|
||||
-- ===================================================================
|
||||
|
||||
SET LINESIZE 200
SET PAGESIZE 1000
SET FEEDBACK OFF

PROMPT
PROMPT ========================================
PROMPT Package Version Verification
PROMPT ========================================
PROMPT

COLUMN PACKAGE_OWNER FORMAT A15
COLUMN PACKAGE_NAME FORMAT A20
COLUMN VERSION FORMAT A10
COLUMN STATUS FORMAT A80

-- Pick the most recent tracking row per package, then ask ENV_MANAGER whether
-- the deployed code still matches the hash recorded at tracking time.
WITH latest_tracking AS (
    SELECT
        PACKAGE_OWNER,
        PACKAGE_NAME,
        PACKAGE_VERSION,
        ROW_NUMBER() OVER (PARTITION BY PACKAGE_OWNER, PACKAGE_NAME
                           ORDER BY TRACKING_DATE DESC) AS RN
    FROM CT_MRDS.A_PACKAGE_VERSION_TRACKING
)
SELECT
    PACKAGE_OWNER,
    PACKAGE_NAME,
    PACKAGE_VERSION AS VERSION,
    CT_MRDS.ENV_MANAGER.CHECK_PACKAGE_CHANGES(PACKAGE_OWNER, PACKAGE_NAME) AS STATUS
FROM latest_tracking
WHERE RN = 1
ORDER BY PACKAGE_OWNER, PACKAGE_NAME;

PROMPT
PROMPT ========================================
PROMPT Verification Complete
PROMPT ========================================
PROMPT
PROMPT Legend:
PROMPT OK - Package has not changed since last tracking
PROMPT WARNING - Package code changed without version update
PROMPT
PROMPT For detailed hash information, use:
PROMPT SELECT ENV_MANAGER.GET_PACKAGE_HASH_INFO('OWNER', 'PACKAGE') FROM DUAL;
PROMPT ========================================

SET FEEDBACK ON
|
||||
5
MARS_Packages/REL01_POST_DEACTIVATION/MARS-835-PREHOOK/.gitignore
vendored
Normal file
5
MARS_Packages/REL01_POST_DEACTIVATION/MARS-835-PREHOOK/.gitignore
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
# Exclude temporary folders from version control
|
||||
confluence/
|
||||
log/
|
||||
test/
|
||||
mock_data/
|
||||
@@ -0,0 +1,30 @@
|
||||
-- ============================================================================
|
||||
-- MARS-835-PREHOOK Installation Script 00: A_PARALLEL_EXPORT_CHUNKS Table
|
||||
-- ============================================================================
|
||||
-- Purpose: Create permanent table for parallel export chunk processing
|
||||
-- Schema: CT_MRDS
|
||||
-- Object: TABLE A_PARALLEL_EXPORT_CHUNKS
|
||||
-- ============================================================================
|
||||
|
||||
SET SERVEROUTPUT ON SIZE UNLIMITED
|
||||
|
||||
PROMPT
|
||||
PROMPT ============================================================================
|
||||
PROMPT MARS-835-PREHOOK: Creating A_PARALLEL_EXPORT_CHUNKS Table
|
||||
PROMPT ============================================================================
|
||||
PROMPT Table: CT_MRDS.A_PARALLEL_EXPORT_CHUNKS
|
||||
PROMPT Purpose: Parallel export chunk processing for DBMS_PARALLEL_EXECUTE
|
||||
PROMPT ============================================================================
|
||||
|
||||
-- Deploy table definition from new_version folder
|
||||
@@new_version\A_PARALLEL_EXPORT_CHUNKS.sql
|
||||
|
||||
PROMPT
|
||||
PROMPT ============================================================================
|
||||
PROMPT A_PARALLEL_EXPORT_CHUNKS table creation completed successfully
|
||||
PROMPT ============================================================================
|
||||
PROMPT
|
||||
|
||||
--=============================================================================================================================
|
||||
-- End of Script
|
||||
--=============================================================================================================================
|
||||
@@ -0,0 +1,46 @@
|
||||
-- ============================================================================
|
||||
-- MARS-835-PREHOOK Installation Script 01: ENV_MANAGER Package
|
||||
-- ============================================================================
|
||||
-- Purpose: Deploy updated ENV_MANAGER package (SPEC + BODY) with parallel execution error codes
|
||||
-- Schema: CT_MRDS
|
||||
-- Object: PACKAGE ENV_MANAGER
|
||||
-- ============================================================================
|
||||
|
||||
SET SERVEROUTPUT ON SIZE UNLIMITED
|
||||
|
||||
PROMPT
|
||||
PROMPT ============================================================================
|
||||
PROMPT MARS-835-PREHOOK: Installing CT_MRDS.ENV_MANAGER Package
|
||||
PROMPT ============================================================================
|
||||
PROMPT Package: CT_MRDS.ENV_MANAGER
|
||||
PROMPT Version: 3.1.0 -> 3.2.0 (MINOR)
|
||||
PROMPT Change: Added error codes for parallel execution support
|
||||
PROMPT ============================================================================
|
||||
|
||||
PROMPT
|
||||
PROMPT Step 1: Deploy Package Specification
|
||||
PROMPT ============================================================================
|
||||
|
||||
-- Deploy package specification from new_version folder
|
||||
@@new_version\ENV_MANAGER.pkg
|
||||
|
||||
PROMPT
|
||||
PROMPT Package specification deployment completed.
|
||||
PROMPT
|
||||
|
||||
PROMPT
|
||||
PROMPT Step 2: Deploy Package Body
|
||||
PROMPT ============================================================================
|
||||
|
||||
-- Deploy package body from new_version folder
|
||||
@@new_version\ENV_MANAGER.pkb
|
||||
|
||||
PROMPT
|
||||
PROMPT Package body deployment completed.
|
||||
PROMPT
|
||||
|
||||
PROMPT
|
||||
PROMPT ============================================================================
|
||||
PROMPT ENV_MANAGER Package installation completed successfully
|
||||
PROMPT ============================================================================
|
||||
PROMPT
|
||||
@@ -0,0 +1,46 @@
|
||||
-- ============================================================================
|
||||
-- MARS-835-PREHOOK Installation Script 02: DATA_EXPORTER Package
|
||||
-- ============================================================================
|
||||
-- Purpose: Deploy updated DATA_EXPORTER package (SPEC + BODY) with parallel processing
|
||||
-- Schema: CT_MRDS
|
||||
-- Object: PACKAGE DATA_EXPORTER
|
||||
-- ============================================================================
|
||||
|
||||
SET SERVEROUTPUT ON SIZE UNLIMITED
|
||||
|
||||
PROMPT
|
||||
PROMPT ============================================================================
|
||||
PROMPT MARS-835-PREHOOK: Installing CT_MRDS.DATA_EXPORTER Package
|
||||
PROMPT ============================================================================
|
||||
PROMPT Package: CT_MRDS.DATA_EXPORTER
|
||||
PROMPT Version: 2.2.0 -> 2.4.0 (MINOR)
|
||||
PROMPT Change: Added parallel processing + Smart Column Mapping for CSV exports
|
||||
PROMPT ============================================================================
|
||||
|
||||
PROMPT
|
||||
PROMPT Step 1: Deploy Package Specification
|
||||
PROMPT ============================================================================
|
||||
|
||||
-- Deploy package specification from new_version folder
|
||||
@@new_version\DATA_EXPORTER.pkg
|
||||
|
||||
PROMPT
|
||||
PROMPT Package specification deployment completed.
|
||||
PROMPT
|
||||
|
||||
PROMPT
|
||||
PROMPT Step 2: Deploy Package Body
|
||||
PROMPT ============================================================================
|
||||
|
||||
-- Deploy package body from new_version folder
|
||||
@@new_version\DATA_EXPORTER.pkb
|
||||
|
||||
PROMPT
|
||||
PROMPT Package body deployment completed.
|
||||
PROMPT
|
||||
|
||||
PROMPT
|
||||
PROMPT ============================================================================
|
||||
PROMPT DATA_EXPORTER Package installation completed successfully
|
||||
PROMPT ============================================================================
|
||||
PROMPT
|
||||
@@ -0,0 +1,38 @@
|
||||
-- ============================================================================
|
||||
-- MARS-835-PREHOOK Rollback Script 90: Drop A_PARALLEL_EXPORT_CHUNKS Table
|
||||
-- ============================================================================
|
||||
-- Purpose: Remove A_PARALLEL_EXPORT_CHUNKS table created for parallel processing
|
||||
-- Schema: CT_MRDS
|
||||
-- Object: TABLE A_PARALLEL_EXPORT_CHUNKS
|
||||
-- ============================================================================
|
||||
|
||||
SET SERVEROUTPUT ON SIZE UNLIMITED
|
||||
|
||||
PROMPT
|
||||
PROMPT ============================================================================
|
||||
PROMPT MARS-835-PREHOOK: Dropping A_PARALLEL_EXPORT_CHUNKS Table
|
||||
PROMPT ============================================================================
|
||||
PROMPT Table: CT_MRDS.A_PARALLEL_EXPORT_CHUNKS
|
||||
PROMPT Purpose: Remove parallel export chunk processing table
|
||||
PROMPT ============================================================================
|
||||
|
||||
-- Drop table if exists
|
||||
DECLARE
    -- ORA-00942: table or view does not exist
    eTableMissing EXCEPTION;
    PRAGMA EXCEPTION_INIT(eTableMissing, -942);
BEGIN
    EXECUTE IMMEDIATE 'DROP TABLE CT_MRDS.A_PARALLEL_EXPORT_CHUNKS';
    DBMS_OUTPUT.PUT_LINE('SUCCESS: A_PARALLEL_EXPORT_CHUNKS table dropped');
EXCEPTION
    -- Idempotent rollback: a missing table is fine, anything else is fatal.
    WHEN eTableMissing THEN
        DBMS_OUTPUT.PUT_LINE('INFO: A_PARALLEL_EXPORT_CHUNKS table does not exist - nothing to drop');
    WHEN OTHERS THEN
        DBMS_OUTPUT.PUT_LINE('ERROR: Failed to drop A_PARALLEL_EXPORT_CHUNKS table');
        RAISE;
END;
/
|
||||
|
||||
PROMPT
|
||||
PROMPT ============================================================================
|
||||
PROMPT A_PARALLEL_EXPORT_CHUNKS table rollback completed successfully
|
||||
PROMPT ============================================================================
|
||||
PROMPT
|
||||
@@ -0,0 +1,64 @@
|
||||
--=============================================================================================================================
|
||||
-- MARS-835-PREHOOK: Rollback ENV_MANAGER Package
|
||||
--=============================================================================================================================
|
||||
-- Purpose: Rollback ENV_MANAGER package (BODY + SPEC) to version 3.1.0
|
||||
-- Author: Grzegorz Michalski
|
||||
-- Date: 2026-01-09
|
||||
-- Related: MARS-835-PREHOOK Rollback
|
||||
--=============================================================================================================================
|
||||
|
||||
SET SERVEROUTPUT ON
|
||||
|
||||
PROMPT ========================================================================
|
||||
PROMPT Rolling back ENV_MANAGER Package to v3.1.0
|
||||
PROMPT ========================================================================
|
||||
|
||||
PROMPT
|
||||
PROMPT Step 1: Rollback Package Specification
|
||||
PROMPT ========================================================================
|
||||
|
||||
-- Restore previous package specification from rollback_version folder
|
||||
@@rollback_version\ENV_MANAGER.pkg
|
||||
|
||||
-- Verify compilation (check specific schema when installing as ADMIN)
|
||||
SELECT OBJECT_NAME, OBJECT_TYPE, STATUS
|
||||
FROM ALL_OBJECTS
|
||||
WHERE OWNER = 'CT_MRDS'
|
||||
AND OBJECT_NAME = 'ENV_MANAGER'
|
||||
AND OBJECT_TYPE = 'PACKAGE';
|
||||
|
||||
PROMPT SUCCESS: ENV_MANAGER Package Specification rolled back to v3.1.0
|
||||
PROMPT
|
||||
|
||||
PROMPT
|
||||
PROMPT Step 2: Rollback Package Body
|
||||
PROMPT ========================================================================
|
||||
|
||||
-- Restore previous package body from rollback_version folder
|
||||
@@rollback_version\ENV_MANAGER.pkb
|
||||
|
||||
-- Verify compilation (check specific schema when installing as ADMIN)
|
||||
SELECT OBJECT_NAME, OBJECT_TYPE, STATUS
|
||||
FROM ALL_OBJECTS
|
||||
WHERE OWNER = 'CT_MRDS'
|
||||
AND OBJECT_NAME = 'ENV_MANAGER'
|
||||
AND OBJECT_TYPE = 'PACKAGE BODY';
|
||||
|
||||
-- Check for compilation errors
|
||||
SELECT LINE, POSITION, TEXT
|
||||
FROM ALL_ERRORS
|
||||
WHERE OWNER = 'CT_MRDS'
|
||||
AND NAME = 'ENV_MANAGER'
|
||||
AND TYPE = 'PACKAGE BODY'
|
||||
ORDER BY SEQUENCE;
|
||||
|
||||
PROMPT SUCCESS: ENV_MANAGER Package Body rolled back to v3.1.0
|
||||
PROMPT
|
||||
|
||||
PROMPT ========================================================================
|
||||
PROMPT ENV_MANAGER Package rollback completed successfully
|
||||
PROMPT ========================================================================
|
||||
|
||||
--=============================================================================================================================
|
||||
-- End of Script
|
||||
--=============================================================================================================================
|
||||
@@ -0,0 +1,64 @@
|
||||
--=============================================================================================================================
|
||||
-- MARS-835-PREHOOK: Rollback DATA_EXPORTER Package
|
||||
--=============================================================================================================================
|
||||
-- Purpose: Rollback DATA_EXPORTER package (BODY + SPEC) to version 2.1.0
|
||||
-- Author: Grzegorz Michalski
|
||||
-- Date: 2026-01-09
|
||||
-- Related: MARS-835-PREHOOK Rollback
|
||||
--=============================================================================================================================
|
||||
|
||||
SET SERVEROUTPUT ON
|
||||
|
||||
PROMPT ========================================================================
|
||||
PROMPT Rolling back DATA_EXPORTER Package to v2.1.0
|
||||
PROMPT ========================================================================
|
||||
|
||||
PROMPT
|
||||
PROMPT Step 1: Rollback Package Specification
|
||||
PROMPT ========================================================================
|
||||
|
||||
-- Restore previous package specification from rollback_version folder
|
||||
@@rollback_version\DATA_EXPORTER.pkg
|
||||
|
||||
-- Verify compilation (check specific schema when installing as ADMIN)
|
||||
SELECT OBJECT_NAME, OBJECT_TYPE, STATUS
|
||||
FROM ALL_OBJECTS
|
||||
WHERE OWNER = 'CT_MRDS'
|
||||
AND OBJECT_NAME = 'DATA_EXPORTER'
|
||||
AND OBJECT_TYPE = 'PACKAGE';
|
||||
|
||||
PROMPT SUCCESS: DATA_EXPORTER Package Specification rolled back to v2.1.0
|
||||
PROMPT
|
||||
|
||||
PROMPT
|
||||
PROMPT Step 2: Rollback Package Body
|
||||
PROMPT ========================================================================
|
||||
|
||||
-- Restore previous package body from rollback_version folder
|
||||
@@rollback_version\DATA_EXPORTER.pkb
|
||||
|
||||
-- Verify compilation (check specific schema when installing as ADMIN)
|
||||
SELECT OBJECT_NAME, OBJECT_TYPE, STATUS
|
||||
FROM ALL_OBJECTS
|
||||
WHERE OWNER = 'CT_MRDS'
|
||||
AND OBJECT_NAME = 'DATA_EXPORTER'
|
||||
AND OBJECT_TYPE = 'PACKAGE BODY';
|
||||
|
||||
-- Check for compilation errors
|
||||
SELECT LINE, POSITION, TEXT
|
||||
FROM ALL_ERRORS
|
||||
WHERE OWNER = 'CT_MRDS'
|
||||
AND NAME = 'DATA_EXPORTER'
|
||||
AND TYPE = 'PACKAGE BODY'
|
||||
ORDER BY SEQUENCE;
|
||||
|
||||
PROMPT SUCCESS: DATA_EXPORTER Package Body rolled back to v2.1.0
|
||||
PROMPT
|
||||
|
||||
PROMPT ========================================================================
|
||||
PROMPT DATA_EXPORTER Package rollback completed successfully
|
||||
PROMPT ========================================================================
|
||||
|
||||
--=============================================================================================================================
|
||||
-- End of Script
|
||||
--=============================================================================================================================
|
||||
@@ -0,0 +1,82 @@
|
||||
# MARS-835-PREHOOK: Parallel Processing for DATA_EXPORTER
|
||||
|
||||
## Overview
|
||||
Implements parallel partition processing for DATA_EXPORTER package using **DBMS_PARALLEL_EXECUTE** framework.
|
||||
|
||||
## Changes Summary
|
||||
|
||||
### ENV_MANAGER v3.1.0 → v3.2.0
|
||||
- Added `CODE_INVALID_PARALLEL_DEGREE` (-20110) error code
|
||||
- Added `CODE_PARALLEL_EXECUTION_FAILED` (-20111) error code
|
||||
- Added corresponding message constants and exception declarations
|
||||
|
||||
### DATA_EXPORTER v2.2.0 → v2.4.0
|
||||
- Added `pParallelDegree` parameter to `EXPORT_TABLE_DATA_BY_DATE` (default: 1, range: 1-16)
|
||||
- Added `pParallelDegree` parameter to `EXPORT_TABLE_DATA_TO_CSV_BY_DATE` (default: 1, range: 1-16)
|
||||
- Implemented `EXPORT_PARTITION_PARALLEL` callback procedure for DBMS_PARALLEL_EXECUTE
|
||||
- Created global temporary table `A_PARALLEL_EXPORT_CHUNKS` for chunk management
|
||||
- Sequential processing when `pParallelDegree = 1` (default - safest option)
|
||||
- Parallel processing via DBMS_PARALLEL_EXECUTE when `pParallelDegree > 1`
|
||||
- Automatic error detection and reporting through `USER_PARALLEL_EXECUTE_CHUNKS`
|
||||
|
||||
## Installation
|
||||
|
||||
### Prerequisites
|
||||
- Oracle Database 23ai (tested target; note DBMS_PARALLEL_EXECUTE itself is available since 11g Release 2)
|
||||
- ADMIN privileges for table creation
|
||||
- CT_MRDS schema for package deployment
|
||||
|
||||
### Installation Command
|
||||
```powershell
|
||||
cd .\MARS_Packages\REL01_POST_DEACTIVATION\MARS-835-PREHOOK
|
||||
echo "YES" | sql "ADMIN/<password>@<tns_alias>" "@install_mars835_prehook.sql"
|
||||
```
|
||||
|
||||
## Usage Examples
|
||||
|
||||
### Parallel Export (8 threads)
|
||||
```sql
|
||||
BEGIN
|
||||
CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
|
||||
pSchemaName => 'OU_TOP',
|
||||
pTableName => 'AGGREGATED_ALLOTMENT',
|
||||
pKeyColumnName => 'A_WORKFLOW_HISTORY_KEY',
|
||||
pBucketArea => 'ARCHIVE',
|
||||
pFolderName => 'parallel_export',
|
||||
pMinDate => DATE '2020-01-01',
|
||||
pMaxDate => SYSDATE,
|
||||
pParallelDegree => 8
|
||||
);
|
||||
END;
|
||||
/
|
||||
```
|
||||
|
||||
### Sequential Export
|
||||
```sql
|
||||
BEGIN
|
||||
CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
|
||||
pSchemaName => 'OU_TOP',
|
||||
pTableName => 'AGGREGATED_ALLOTMENT',
|
||||
pKeyColumnName => 'A_WORKFLOW_HISTORY_KEY',
|
||||
pBucketArea => 'DATA',
|
||||
pFolderName => 'sequential_export',
|
||||
pParallelDegree => 1 -- Sequential
|
||||
);
|
||||
END;
|
||||
/
|
||||
```
|
||||
|
||||
## Test Results
|
||||
✅ Installation successful
|
||||
✅ ENV_MANAGER v3.2.0 compiled
|
||||
✅ DATA_EXPORTER v2.4.0 compiled
|
||||
✅ Zero partition handling works correctly
|
||||
✅ DBMS_PARALLEL_EXECUTE framework verified
|
||||
|
||||
## Rollback
|
||||
```powershell
|
||||
sql "ADMIN/<password>@<tns_alias>" "@rollback_mars835_prehook.sql"
|
||||
```
|
||||
|
||||
## Author
|
||||
Grzegorz Michalski - 2025-12-20
|
||||
@@ -0,0 +1,93 @@
|
||||
-- ===================================================================
|
||||
-- MARS-835-PREHOOK INSTALL SCRIPT: Parallel Processing + Smart Column Mapping
|
||||
-- ===================================================================
|
||||
-- Purpose: Pre-hook for MARS-835 - Implement DBMS_PARALLEL_EXECUTE + Smart Column Mapping
|
||||
-- Author: Grzegorz Michalski
|
||||
-- Date: 2026-01-09
|
||||
-- Version: ENV_MANAGER 3.2.0, DATA_EXPORTER 2.4.0
|
||||
|
||||
-- Dynamic spool file generation (using SYS_CONTEXT - no DBA privileges required)
|
||||
-- Log files are automatically created in log/ subdirectory
|
||||
-- IMPORTANT: Ensure log/ directory exists before SPOOL (use host mkdir)
|
||||
host mkdir log 2>nul
|
||||
|
||||
var filename VARCHAR2(100)
|
||||
BEGIN
|
||||
:filename := 'log/INSTALL_MARS_835_PREHOOK_' || SYS_CONTEXT('USERENV', 'CON_NAME') || '_' || TO_CHAR(SYSDATE,'YYYYMMDD_HH24MISS') || '.log';
|
||||
END;
|
||||
/
|
||||
column filename new_value _filename
|
||||
select :filename filename from dual;
|
||||
spool &_filename
|
||||
|
||||
SET ECHO OFF
|
||||
SET TIMING ON
|
||||
SET SERVEROUTPUT ON SIZE UNLIMITED
|
||||
SET PAUSE OFF
|
||||
|
||||
PROMPT =========================================================================
|
||||
PROMPT MARS-835-PREHOOK: Parallel Processing + Smart Column Mapping
|
||||
PROMPT =========================================================================
|
||||
PROMPT
|
||||
PROMPT This script will:
|
||||
PROMPT - Create A_PARALLEL_EXPORT_CHUNKS table with unique timestamp task names
|
||||
PROMPT - Update ENV_MANAGER to v3.2.0 (add parallel execution error codes)
|
||||
PROMPT - Update DATA_EXPORTER to v2.4.0 (DBMS_PARALLEL_EXECUTE + Smart Column Mapping)
|
||||
PROMPT - Add pParallelDegree parameter (1-16 threads) to EXPORT_*_BY_DATE procedures
|
||||
PROMPT - Add pTargetTableOwner/pTargetTableName for CSV column order mapping
|
||||
PROMPT - Fix unique constraint violations with auto-cleanup and timestamp task names
|
||||
PROMPT
|
||||
PROMPT Expected Duration: 2-3 minutes
|
||||
PROMPT =========================================================================
|
||||
|
||||
-- Confirm installation with user
ACCEPT continue CHAR PROMPT 'Type YES to continue with installation, or Ctrl+C to abort: '
WHENEVER SQLERROR EXIT SQL.SQLCODE
BEGIN
    -- Abort unless the operator typed YES (any case, surrounding blanks ignored).
    -- In Oracle an empty ACCEPT answer substitutes to '' which is NULL, so the
    -- single NVL covers the three separate NULL/TRIM checks the original had:
    -- NULL input, blank input, and anything other than YES all abort.
    IF NVL(UPPER(TRIM('&continue')), 'NO') <> 'YES' THEN
        RAISE_APPLICATION_ERROR(-20999, 'Installation aborted by user.');
    END IF;
END;
/
|
||||
WHENEVER SQLERROR CONTINUE
|
||||
|
||||
PROMPT
|
||||
PROMPT =========================================================================
|
||||
PROMPT Step 1: Create Parallel Export Chunks Table
|
||||
PROMPT =========================================================================
|
||||
@@00_MARS_835_PREHOOK_CREATE_PARALLEL_CHUNKS_TABLE.sql
|
||||
|
||||
PROMPT
|
||||
PROMPT =========================================================================
|
||||
PROMPT Step 2: Deploy ENV_MANAGER Package
|
||||
PROMPT =========================================================================
|
||||
@@01_MARS_835_PREHOOK_install_ENV_MANAGER.sql
|
||||
|
||||
PROMPT
|
||||
PROMPT =========================================================================
|
||||
PROMPT Step 3: Deploy DATA_EXPORTER Package
|
||||
PROMPT =========================================================================
|
||||
@@02_MARS_835_PREHOOK_install_DATA_EXPORTER.sql
|
||||
|
||||
PROMPT
|
||||
PROMPT =========================================================================
|
||||
PROMPT Step 4: Track Package Versions
|
||||
PROMPT =========================================================================
|
||||
@@track_package_versions.sql
|
||||
|
||||
PROMPT
|
||||
PROMPT =========================================================================
|
||||
PROMPT Step 5: Verify Package Versions
|
||||
PROMPT =========================================================================
|
||||
@@verify_packages_version.sql
|
||||
|
||||
PROMPT
|
||||
PROMPT =========================================================================
|
||||
PROMPT MARS-835-PREHOOK Installation - COMPLETED
|
||||
PROMPT =========================================================================
|
||||
PROMPT Check the log file for complete installation details.
|
||||
PROMPT =========================================================================
|
||||
|
||||
spool off
|
||||
|
||||
quit;
|
||||
@@ -0,0 +1,69 @@
|
||||
-- ============================================================================
-- Table: A_PARALLEL_EXPORT_CHUNKS
-- ============================================================================
-- Purpose: Permanent table for storing partition export chunks for DBMS_PARALLEL_EXECUTE
-- CRITICAL: Must be permanent (not global temporary) because DBMS_PARALLEL_EXECUTE
--           runs callbacks in separate sessions that cannot access GTT data from parent session
-- Schema:   CT_MRDS
-- Author:   Grzegorz Michalski
-- Created:  2025-12-20
-- Modified: 2025-12-21 - Changed from GTT to permanent table
-- Related:  MARS-835-PREHOOK
-- NOTE(review): diff-rendering artifact lines ('|', '||||') removed; they
--               would make this script unrunnable.
-- ============================================================================

-- Drop if exists (in case of re-run). ORA-00942 (table does not exist) is
-- the expected first-run outcome and is swallowed; any other error re-raises.
BEGIN
    EXECUTE IMMEDIATE 'DROP TABLE CT_MRDS.A_PARALLEL_EXPORT_CHUNKS';
    DBMS_OUTPUT.PUT_LINE('Dropped existing A_PARALLEL_EXPORT_CHUNKS table');
EXCEPTION
    WHEN OTHERS THEN
        IF SQLCODE != -942 THEN -- ORA-00942: table or view does not exist
            RAISE;
        ELSE
            DBMS_OUTPUT.PUT_LINE('Table A_PARALLEL_EXPORT_CHUNKS does not exist - will create new');
        END IF;
END;
/

-- One row per (year, month) partition chunk; CHUNK_ID is handed to the
-- DBMS_PARALLEL_EXECUTE callback as the pStartId/pEndId range.
CREATE TABLE CT_MRDS.A_PARALLEL_EXPORT_CHUNKS (
    CHUNK_ID            NUMBER PRIMARY KEY,
    TASK_NAME           VARCHAR2(100)  NOT NULL,
    YEAR_VALUE          VARCHAR2(4)    NOT NULL,
    MONTH_VALUE         VARCHAR2(2)    NOT NULL,
    SCHEMA_NAME         VARCHAR2(128)  NOT NULL,
    TABLE_NAME          VARCHAR2(128)  NOT NULL,
    KEY_COLUMN_NAME     VARCHAR2(128)  NOT NULL,
    BUCKET_URI          VARCHAR2(4000) NOT NULL,
    FOLDER_NAME         VARCHAR2(1000) NOT NULL,
    -- NOTE(review): VARCHAR2(32767) in a table column requires
    -- MAX_STRING_SIZE=EXTENDED on the target database - confirm before deploy.
    PROCESSED_COLUMNS   VARCHAR2(32767),
    MIN_DATE            DATE           NOT NULL,
    MAX_DATE            DATE           NOT NULL,
    CREDENTIAL_NAME     VARCHAR2(200)  NOT NULL,
    FORMAT_TYPE         VARCHAR2(20)   NOT NULL,
    FILE_BASE_NAME      VARCHAR2(1000),
    TEMPLATE_TABLE_NAME VARCHAR2(200),
    MAX_FILE_SIZE       NUMBER         DEFAULT 104857600 NOT NULL,
    CREATED_DATE        TIMESTAMP      DEFAULT SYSTIMESTAMP NOT NULL
);

-- Task-level lookups/cleanup scan by TASK_NAME.
CREATE INDEX IX_PARALLEL_CHUNKS_TASK ON CT_MRDS.A_PARALLEL_EXPORT_CHUNKS(TASK_NAME);

COMMENT ON TABLE CT_MRDS.A_PARALLEL_EXPORT_CHUNKS IS 'Permanent table for parallel export chunk processing (DBMS_PARALLEL_EXECUTE) - permanent because GTT data not visible in parallel callback sessions';
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.CHUNK_ID IS 'Unique chunk identifier (partition number)';
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.TASK_NAME IS 'DBMS_PARALLEL_EXECUTE task name for cleanup';
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.YEAR_VALUE IS 'Partition year (YYYY)';
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.MONTH_VALUE IS 'Partition month (MM)';
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.SCHEMA_NAME IS 'Schema owning the source table';
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.TABLE_NAME IS 'Source table name for export';
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.KEY_COLUMN_NAME IS 'Key column for load history join';
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.BUCKET_URI IS 'OCI bucket URI for export destination';
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.FOLDER_NAME IS 'Folder name within bucket';
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.PROCESSED_COLUMNS IS 'Comma-separated list of columns to export';
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.MIN_DATE IS 'Minimum date filter for partition';
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.MAX_DATE IS 'Maximum date filter for partition';
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.CREDENTIAL_NAME IS 'OCI credential name for authentication';
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.FORMAT_TYPE IS 'Export format: PARQUET or CSV';
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.FILE_BASE_NAME IS 'Base filename for CSV exports (NULL for Parquet)';
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.TEMPLATE_TABLE_NAME IS 'Template table name for per-column date format configuration (e.g., CT_ET_TEMPLATES.TABLE_NAME)';
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.MAX_FILE_SIZE IS 'Maximum file size in bytes for CSV exports only (e.g., 104857600 = 100MB, 1073741824 = 1GB) - default 100MB (104857600). NOTE: Not applicable for PARQUET format (Oracle limitation)';
COMMENT ON COLUMN CT_MRDS.A_PARALLEL_EXPORT_CHUNKS.CREATED_DATE IS 'Timestamp when chunk was created';
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,214 @@
|
||||
create or replace PACKAGE CT_MRDS.DATA_EXPORTER
AUTHID CURRENT_USER
AS
/**
 * Data Export Package: Provides comprehensive data export capabilities to various formats (CSV, Parquet)
 * with support for cloud storage integration via Oracle Cloud Infrastructure (OCI).
 * The structure of comment is used by GET_PACKAGE_DOCUMENTATION function
 * which returns documentation text for confluence page (to Copy-Paste it).
 *
 * NOTE(review): diff-rendering artifact lines ('|', '||||') removed from this
 * specification; fixed comment-only typos (infrastructure, separate). All
 * declarations and string literals are unchanged.
 **/

    -- Package Version Information
    PACKAGE_VERSION     CONSTANT VARCHAR2(10) := '2.5.0';
    PACKAGE_BUILD_DATE  CONSTANT VARCHAR2(19) := '2026-01-26 13:30:00';
    PACKAGE_AUTHOR      CONSTANT VARCHAR2(50) := 'MRDS Development Team';

    -- Version History (last 3-5 changes)
    VERSION_HISTORY CONSTANT VARCHAR2(4000) :=
        'v2.5.0 (2026-01-26): Added recorddelimiter parameter with CRLF (CHR(13)||CHR(10)) for CSV exports to ensure Windows-compatible line endings. Improves cross-platform compatibility when CSV files are opened in Windows applications (Notepad, Excel).' || CHR(10) ||
        'v2.4.0 (2026-01-11): Added pTemplateTableName parameter for per-column date format configuration. Implements dynamic query building with TO_CHAR for each date/timestamp column using FILE_MANAGER.GET_DATE_FORMAT. Supports 3-tier hierarchy: column-specific, template DEFAULT, global fallback. Eliminates single dateformat limitation of DBMS_CLOUD.EXPORT_DATA.' || CHR(10) ||
        'v2.3.0 (2025-12-20): Added parallel partition processing using DBMS_PARALLEL_EXECUTE. New pParallelDegree parameter (1-16, default 1) for EXPORT_TABLE_DATA_BY_DATE and EXPORT_TABLE_DATA_TO_CSV_BY_DATE procedures. Each year/month partition processed in separate thread for improved performance.' || CHR(10) ||
        'v2.2.0 (2025-12-19): DRY refactoring - extracted shared helper functions (sanitizeFilename, VALIDATE_TABLE_AND_COLUMNS, GET_PARTITIONS, EXPORT_SINGLE_PARTITION worker procedure). Reduced code duplication by ~400 lines. Prepared architecture for v2.3.0 parallel processing.' || CHR(10) ||
        'v2.1.1 (2025-12-04): Fixed JOIN column reference A_WORKFLOW_HISTORY_KEY -> A_ETL_LOAD_SET_KEY, added consistent column mapping and dynamic column list to EXPORT_TABLE_DATA procedure, enhanced DEBUG logging for all export operations' || CHR(10) ||
        'v2.1.0 (2025-10-22): Added version tracking and PARTITION_YEAR/PARTITION_MONTH support' || CHR(10) ||
        'v2.0.0 (2025-10-01): Separated export functionality from FILE_MANAGER package' || CHR(10);

    -- CRLF line break used when assembling multi-line messages.
    cgBL     CONSTANT VARCHAR2(2) := CHR(13)||CHR(10);
    -- Scratch buffer for building log/error messages.
    vgMsgTmp VARCHAR2(32000);

    ---------------------------------------------------------------------------------------------------------------------------
    -- TYPE DEFINITIONS FOR PARTITION HANDLING
    ---------------------------------------------------------------------------------------------------------------------------

    /**
     * Record type for year/month partition information
     **/
    TYPE partition_rec IS RECORD (
        year  VARCHAR2(4),
        month VARCHAR2(2)
    );

    /**
     * Table type for collection of partition records
     **/
    TYPE partition_tab IS TABLE OF partition_rec;

    ---------------------------------------------------------------------------------------------------------------------------
    -- INTERNAL PARALLEL PROCESSING CALLBACK
    ---------------------------------------------------------------------------------------------------------------------------

    /**
     * @name EXPORT_PARTITION_PARALLEL
     * @desc Internal callback procedure for DBMS_PARALLEL_EXECUTE.
     *       Processes single partition (year/month) chunk in parallel task.
     *       Called by DBMS_PARALLEL_EXECUTE framework for each chunk.
     *       This procedure is PUBLIC because DBMS_PARALLEL_EXECUTE requires it,
     *       but should NOT be called directly by external code.
     * @param pStartId - Chunk start ID (CHUNK_ID from A_PARALLEL_EXPORT_CHUNKS table)
     * @param pEndId   - Chunk end ID (same as pStartId for single-row chunks)
     **/
    PROCEDURE EXPORT_PARTITION_PARALLEL (
        pStartId IN NUMBER,
        pEndId   IN NUMBER
    );

    ---------------------------------------------------------------------------------------------------------------------------
    -- MAIN EXPORT PROCEDURES
    ---------------------------------------------------------------------------------------------------------------------------

    /**
     * @name EXPORT_TABLE_DATA
     * @desc Wrapper procedure for DBMS_CLOUD.EXPORT_DATA.
     *       Exports data into CSV file on OCI infrastructure.
     *       pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
     * @example
     *   begin
     *     DATA_EXPORTER.EXPORT_TABLE_DATA(
     *         pSchemaName    => 'CT_MRDS',
     *         pTableName     => 'MY_TABLE',
     *         pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
     *         pBucketArea    => 'DATA',
     *         pFolderName    => 'csv_exports'
     *     );
     *   end;
     **/
    PROCEDURE EXPORT_TABLE_DATA (
        pSchemaName     IN VARCHAR2,
        pTableName      IN VARCHAR2,
        pKeyColumnName  IN VARCHAR2,
        pBucketArea     IN VARCHAR2,
        pFolderName     IN VARCHAR2,
        pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
    );

    /**
     * @name EXPORT_TABLE_DATA_BY_DATE
     * @desc Wrapper procedure for DBMS_CLOUD.EXPORT_DATA.
     *       Exports data into PARQUET files on OCI infrastructure.
     *       Each YEAR_MONTH pair goes to separate file (implicit partitioning).
     *       Allows specifying custom column list or uses T.* if pColumnList is NULL.
     *       Validates that all columns in pColumnList exist in the target table.
     *       Automatically adds 'T.' prefix to column names in pColumnList.
     *       Supports parallel partition processing via pParallelDegree parameter (default 1, range 1-16).
     *       pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
     * @example
     *   begin
     *     DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
     *         pSchemaName     => 'CT_MRDS',
     *         pTableName      => 'MY_TABLE',
     *         pKeyColumnName  => 'A_ETL_LOAD_SET_KEY_FK',
     *         pBucketArea     => 'DATA',
     *         pFolderName     => 'parquet_exports',
     *         pColumnList     => 'COLUMN1, COLUMN2, COLUMN3', -- Optional
     *         pMinDate        => DATE '2024-01-01',
     *         pMaxDate        => SYSDATE,
     *         pParallelDegree => 8 -- Optional, default 1, range 1-16
     *     );
     *   end;
     **/
    PROCEDURE EXPORT_TABLE_DATA_BY_DATE (
        pSchemaName        IN VARCHAR2,
        pTableName         IN VARCHAR2,
        pKeyColumnName     IN VARCHAR2,
        pBucketArea        IN VARCHAR2,
        pFolderName        IN VARCHAR2,
        pColumnList        IN VARCHAR2 default NULL,
        pMinDate           IN DATE     default DATE '1900-01-01',
        pMaxDate           IN DATE     default SYSDATE,
        pParallelDegree    IN NUMBER   default 1,
        pTemplateTableName IN VARCHAR2 default NULL,
        pCredentialName    IN VARCHAR2 default ENV_MANAGER.gvCredentialName
    );

    /**
     * @name EXPORT_TABLE_DATA_TO_CSV_BY_DATE
     * @desc Exports data to separate CSV files partitioned by year and month.
     *       Creates one CSV file for each year/month combination found in the data.
     *       Uses the same date filtering mechanism with CT_ODS.A_LOAD_HISTORY as EXPORT_TABLE_DATA_BY_DATE,
     *       but exports to CSV format instead of Parquet.
     *       Supports parallel partition processing via pParallelDegree parameter (1-16).
     *       File naming pattern: {pFileName}_YYYYMM.csv or {TABLENAME}_YYYYMM.csv (if pFileName is NULL)
     * @example
     *   begin
     *     -- With custom filename
     *     DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
     *         pSchemaName     => 'CT_MRDS',
     *         pTableName      => 'MY_TABLE',
     *         pKeyColumnName  => 'A_ETL_LOAD_SET_KEY_FK',
     *         pBucketArea     => 'DATA',
     *         pFolderName     => 'exports',
     *         pFileName       => 'my_export.csv',
     *         pMinDate        => DATE '2024-01-01',
     *         pMaxDate        => SYSDATE,
     *         pParallelDegree => 8 -- Optional, default 1, range 1-16
     *     );
     *
     *     -- With auto-generated filename (based on table name only)
     *     DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
     *         pSchemaName     => 'OU_TOP',
     *         pTableName      => 'AGGREGATED_ALLOTMENT',
     *         pKeyColumnName  => 'A_ETL_LOAD_SET_KEY_FK',
     *         pBucketArea     => 'ARCHIVE',
     *         pFolderName     => 'exports',
     *         pMinDate        => DATE '2025-09-01',
     *         pMaxDate        => DATE '2025-09-17'
     *     );
     *     -- This will create files like: AGGREGATED_ALLOTMENT_202509.csv, etc.
     *     pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
     *   end;
     **/
    PROCEDURE EXPORT_TABLE_DATA_TO_CSV_BY_DATE (
        pSchemaName        IN VARCHAR2,
        pTableName         IN VARCHAR2,
        pKeyColumnName     IN VARCHAR2,
        pBucketArea        IN VARCHAR2,
        pFolderName        IN VARCHAR2,
        pFileName          IN VARCHAR2 DEFAULT NULL,
        pColumnList        IN VARCHAR2 default NULL,
        pMinDate           IN DATE     default DATE '1900-01-01',
        pMaxDate           IN DATE     default SYSDATE,
        pParallelDegree    IN NUMBER   default 1,
        pTemplateTableName IN VARCHAR2 default NULL,
        pMaxFileSize       IN NUMBER   default 104857600,
        pCredentialName    IN VARCHAR2 default ENV_MANAGER.gvCredentialName
    );

    ---------------------------------------------------------------------------------------------------------------------------
    -- VERSION MANAGEMENT FUNCTIONS
    ---------------------------------------------------------------------------------------------------------------------------

    /**
     * Returns the current package version number
     * return: Version string in format X.Y.Z (e.g., '2.5.0')
     **/
    FUNCTION GET_VERSION RETURN VARCHAR2;

    /**
     * Returns comprehensive build information including version, date, and author
     * return: Formatted string with complete build details
     **/
    FUNCTION GET_BUILD_INFO RETURN VARCHAR2;

    /**
     * Returns the version history with recent changes
     * return: Multi-line string with version history
     **/
    FUNCTION GET_VERSION_HISTORY RETURN VARCHAR2;

END;

/
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,625 @@
|
||||
create or replace PACKAGE CT_MRDS.ENV_MANAGER
|
||||
AUTHID CURRENT_USER
|
||||
AS
|
||||
/**
|
||||
* General comment for package: Please put comments for functions and procedures as shown in below example.
|
||||
* It is a standard.
|
||||
* The structure of comment is used by GET_PACKAGE_DOCUMENTATION function
|
||||
* which returns documentation text for confluence page (to Copy-Paste it).
|
||||
**/
|
||||
|
||||
-- Example comment:
|
||||
/**
|
||||
* @name EX_PROCEDURE_NAME
|
||||
* @desc Procedure description
|
||||
* @example select ENV_MANAGER.EX_PROCEDURE_NAME(pParameter => 129) from dual;
|
||||
* @ex_rslt Example Result
|
||||
**/
|
||||
|
||||
-- Package Version Information (Semantic Versioning: MAJOR.MINOR.PATCH)
|
||||
PACKAGE_VERSION CONSTANT VARCHAR2(10) := '3.2.0';
|
||||
PACKAGE_BUILD_DATE CONSTANT VARCHAR2(20) := '2025-12-20 10:00:00';
|
||||
PACKAGE_AUTHOR CONSTANT VARCHAR2(100) := 'Grzegorz Michalski';
|
||||
|
||||
-- Version History (Latest changes first)
|
||||
VERSION_HISTORY CONSTANT VARCHAR2(4000) :=
|
||||
'3.2.0 (2025-12-20): Added error codes for parallel execution support (CODE_INVALID_PARALLEL_DEGREE -20110, CODE_PARALLEL_EXECUTION_FAILED -20111)' || CHR(13)||CHR(10) ||
|
||||
'3.1.0 (2025-10-22): Added package hash tracking and automatic change detection system (SHA256 hashing)' || CHR(13)||CHR(10) ||
|
||||
'3.0.0 (2025-10-22): Added package versioning system with centralized version management functions' || CHR(13)||CHR(10) ||
|
||||
'2.1.0 (2025-10-15): Added ANALYZE_VALIDATION_ERRORS function for comprehensive CSV validation analysis' || CHR(13)||CHR(10) ||
|
||||
'2.0.0 (2025-10-01): Added LOG_PROCESS_ERROR procedure with enhanced error diagnostics and stack traces' || CHR(13)||CHR(10) ||
|
||||
'1.5.0 (2025-09-20): Added console logging support with gvConsoleLoggingEnabled configuration' || CHR(13)||CHR(10) ||
|
||||
'1.0.0 (2025-09-01): Initial release with error management and configuration system';
|
||||
|
||||
TYPE Error_Record IS RECORD (
|
||||
code PLS_INTEGER,
|
||||
message VARCHAR2(4000)
|
||||
);
|
||||
|
||||
TYPE tErrorList IS TABLE OF Error_Record INDEX BY PLS_INTEGER;
|
||||
|
||||
Errors tErrorList;
|
||||
|
||||
|
||||
guid VARCHAR2(32);
|
||||
gvEnv VARCHAR2(200);
|
||||
gvUsername VARCHAR2(128);
|
||||
gvOsuser VARCHAR2(128);
|
||||
gvMachine VARCHAR2(64);
|
||||
gvModule VARCHAR2(64);
|
||||
|
||||
gvNameSpace VARCHAR2(200);
|
||||
gvRegion VARCHAR2(200);
|
||||
gvDataBucketName VARCHAR2(200);
|
||||
gvInboxBucketName VARCHAR2(200);
|
||||
gvArchiveBucketName VARCHAR2(200);
|
||||
gvDataBucketUri VARCHAR2(200);
|
||||
gvInboxBucketUri VARCHAR2(200);
|
||||
gvArchiveBucketUri VARCHAR2(200);
|
||||
gvCredentialName VARCHAR2(200);
|
||||
|
||||
-- Overwritten by variable "LoggingEnabled" in A_FILE_MANAGER_CONFIG.CONFIG_VARIABLE table
|
||||
gvLoggingEnabled VARCHAR2(3) := 'ON'; -- 'ON' or 'OFF'
|
||||
|
||||
-- Overwritten by variable "MinLogLevel" in A_FILE_MANAGER_CONFIG.CONFIG_VARIABLE table
|
||||
-- Possible values: DEBUG ,INFO ,WARNING ,ERROR
|
||||
gvMinLogLevel VARCHAR2(10) := 'DEBUG';
|
||||
|
||||
-- Overwritten by variable "DefaultDateFormat" in A_FILE_MANAGER_CONFIG.CONFIG_VARIABLE table
|
||||
gvDefaultDateFormat VARCHAR2(200) := 'DD/MM/YYYY HH24:MI:SS';
|
||||
|
||||
-- Overwritten by variable "ConsoleLoggingEnabled" in A_FILE_MANAGER_CONFIG.CONFIG_VARIABLE table
|
||||
gvConsoleLoggingEnabled VARCHAR2(3) := 'ON'; -- 'ON' or 'OFF'
|
||||
|
||||
cgBL CONSTANT VARCHAR2(2) := CHR(13)||CHR(10);
|
||||
|
||||
vgSourceFileConfigKey PLS_INTEGER;
|
||||
|
||||
vgMsgTmp VARCHAR2(32000);
|
||||
--Exceptions
|
||||
ERR_EMPTY_FILEURI_AND_RECKEY EXCEPTION;
|
||||
CODE_EMPTY_FILEURI_AND_RECKEY CONSTANT PLS_INTEGER := -20001;
|
||||
MSG_EMPTY_FILEURI_AND_RECKEY VARCHAR2(4000) := 'Either pFileUri or pSourceFileReceivedKey must be not null';
|
||||
PRAGMA EXCEPTION_INIT( ERR_EMPTY_FILEURI_AND_RECKEY
|
||||
,CODE_EMPTY_FILEURI_AND_RECKEY);
|
||||
|
||||
|
||||
ERR_NO_CONFIG_MATCH_FOR_FILEURI EXCEPTION;
|
||||
CODE_NO_CONFIG_MATCH_FOR_FILEURI CONSTANT PLS_INTEGER := -20002;
|
||||
MSG_NO_CONFIG_MATCH_FOR_FILEURI VARCHAR2(4000) := 'No match for source file in A_SOURCE_FILE_CONFIG table'
|
||||
||cgBL||' The file provided in parameter: pFileUri does not have '
|
||||
||cgBL||' coresponding configuration in A_SOURCE_FILE_CONFIG table';
|
||||
PRAGMA EXCEPTION_INIT( ERR_NO_CONFIG_MATCH_FOR_FILEURI
|
||||
,CODE_NO_CONFIG_MATCH_FOR_FILEURI);
|
||||
|
||||
ERR_MULTIPLE_MATCH_FOR_SRCFILE EXCEPTION;
|
||||
CODE_MULTIPLE_MATCH_FOR_SRCFILE CONSTANT PLS_INTEGER := -20003;
|
||||
MSG_MULTIPLE_MATCH_FOR_SRCFILE VARCHAR2(4000) := 'Multiple match for source file in A_SOURCE_FILE_CONFIG table';
|
||||
PRAGMA EXCEPTION_INIT( ERR_MULTIPLE_MATCH_FOR_SRCFILE
|
||||
,CODE_MULTIPLE_MATCH_FOR_SRCFILE);
|
||||
|
||||
ERR_MISSING_COLUMN_DATE_FORMAT EXCEPTION;
|
||||
CODE_MISSING_COLUMN_DATE_FORMAT CONSTANT PLS_INTEGER := -20004;
|
||||
MSG_MISSING_COLUMN_DATE_FORMAT VARCHAR2(4000) := 'Missing entry in config table: A_COLUMN_DATE_FORMAT primary key(TEMPLATE_TABLE_NAME, COLUMN_NAME)'
|
||||
||cgBL||' Remember: each column which data_type IN (''DATE'', ''TIMESTAMP'')'
|
||||
||cgBL||' should have DateFormat specified in A_COLUMN_DATE_FORMAT table '
|
||||
||cgBL||' for example: ''YYYY-MM-DD''';
|
||||
PRAGMA EXCEPTION_INIT( ERR_MISSING_COLUMN_DATE_FORMAT
|
||||
,CODE_MISSING_COLUMN_DATE_FORMAT);
|
||||
|
||||
ERR_MULTIPLE_COLUMN_DATE_FORMAT EXCEPTION;
|
||||
CODE_MULTIPLE_COLUMN_DATE_FORMAT CONSTANT PLS_INTEGER := -20005;
|
||||
MSG_MULTIPLE_COLUMN_DATE_FORMAT VARCHAR2(4000) := 'Multiple records for date format in A_COLUMN_DATE_FORMAT table'
|
||||
||cgBL||' There should be only one format specified for each DAT/TIMESTAMP column';
|
||||
PRAGMA EXCEPTION_INIT( ERR_MULTIPLE_COLUMN_DATE_FORMAT
|
||||
,CODE_MULTIPLE_COLUMN_DATE_FORMAT);
|
||||
|
||||
|
||||
ERR_DIDNT_GET_LOAD_OPERATION_ID EXCEPTION;
|
||||
CODE_DIDNT_GET_LOAD_OPERATION_ID CONSTANT PLS_INTEGER := -20006;
|
||||
MSG_DIDNT_GET_LOAD_OPERATION_ID VARCHAR2(4000) := 'Didnt get load operation id from external table validation';
|
||||
PRAGMA EXCEPTION_INIT( ERR_DIDNT_GET_LOAD_OPERATION_ID
|
||||
,CODE_DIDNT_GET_LOAD_OPERATION_ID);
|
||||
|
||||
ERR_NO_CONFIG_FOR_RECEIVED_FILE EXCEPTION;
|
||||
CODE_NO_CONFIG_FOR_RECEIVED_FILE CONSTANT PLS_INTEGER := -20007;
|
||||
MSG_NO_CONFIG_FOR_RECEIVED_FILE VARCHAR2(4000) := 'No match for received source file in A_SOURCE_FILE_CONFIG '
|
||||
||cgBL||' or missing data in A_SOURCE_FILE_RECEIVED table for provided pSourceFileReceivedKey parameter';
|
||||
PRAGMA EXCEPTION_INIT( ERR_NO_CONFIG_FOR_RECEIVED_FILE
|
||||
,CODE_NO_CONFIG_FOR_RECEIVED_FILE);
|
||||
|
||||
ERR_MULTI_CONFIG_FOR_RECEIVED_FILE EXCEPTION;
|
||||
CODE_MULTI_CONFIG_FOR_RECEIVED_FILE CONSTANT PLS_INTEGER := -20008;
|
||||
MSG_MULTI_CONFIG_FOR_RECEIVED_FILE VARCHAR2(4000) := 'Multiple matchs for received source file in A_SOURCE_FILE_CONFIG';
|
||||
PRAGMA EXCEPTION_INIT( ERR_MULTI_CONFIG_FOR_RECEIVED_FILE
|
||||
,CODE_MULTI_CONFIG_FOR_RECEIVED_FILE);
|
||||
|
||||
ERR_FILE_NOT_FOUND_ON_CLOUD EXCEPTION;
|
||||
CODE_FILE_NOT_FOUND_ON_CLOUD CONSTANT PLS_INTEGER := -20009;
|
||||
MSG_FILE_NOT_FOUND_ON_CLOUD VARCHAR2(4000) := 'File not found on the cloud';
|
||||
PRAGMA EXCEPTION_INIT( ERR_FILE_NOT_FOUND_ON_CLOUD
|
||||
,CODE_FILE_NOT_FOUND_ON_CLOUD);
|
||||
|
||||
ERR_FILE_VALIDATION_FAILED EXCEPTION;
|
||||
CODE_FILE_VALIDATION_FAILED CONSTANT PLS_INTEGER := -20010;
|
||||
MSG_FILE_VALIDATION_FAILED VARCHAR2(4000) := 'File validation failed';
|
||||
PRAGMA EXCEPTION_INIT( ERR_FILE_VALIDATION_FAILED
|
||||
,CODE_FILE_VALIDATION_FAILED);
|
||||
|
||||
ERR_EXCESS_COLUMNS_DETECTED EXCEPTION;
|
||||
CODE_EXCESS_COLUMNS_DETECTED CONSTANT PLS_INTEGER := -20011;
|
||||
MSG_EXCESS_COLUMNS_DETECTED VARCHAR2(4000) := 'CSV file contains more columns than template allows';
|
||||
PRAGMA EXCEPTION_INIT( ERR_EXCESS_COLUMNS_DETECTED
|
||||
,CODE_EXCESS_COLUMNS_DETECTED);
|
||||
|
||||
ERR_NO_CONFIG_MATCH EXCEPTION;
|
||||
CODE_NO_CONFIG_MATCH CONSTANT PLS_INTEGER := -20012;
|
||||
MSG_NO_CONFIG_MATCH VARCHAR2(4000) := 'No match for specified parameters in A_SOURCE_FILE_CONFIG table';
|
||||
PRAGMA EXCEPTION_INIT( ERR_NO_CONFIG_MATCH
|
||||
,CODE_NO_CONFIG_MATCH);
|
||||
|
||||
ERR_UNKNOWN_PREFIX EXCEPTION;
|
||||
CODE_UNKNOWN_PREFIX CONSTANT PLS_INTEGER := -20013;
|
||||
MSG_UNKNOWN_PREFIX VARCHAR2(4000) := 'Unknown prefix';
|
||||
PRAGMA EXCEPTION_INIT( ERR_UNKNOWN_PREFIX
|
||||
,CODE_UNKNOWN_PREFIX);
|
||||
|
||||
ERR_TABLE_NOT_EXISTS EXCEPTION;
|
||||
CODE_TABLE_NOT_EXISTS CONSTANT PLS_INTEGER := -20014;
|
||||
MSG_TABLE_NOT_EXISTS VARCHAR2(4000) := 'Table does not exist';
|
||||
PRAGMA EXCEPTION_INIT( ERR_TABLE_NOT_EXISTS
|
||||
,CODE_TABLE_NOT_EXISTS);
|
||||
|
||||
ERR_COLUMN_NOT_EXISTS EXCEPTION;
|
||||
CODE_COLUMN_NOT_EXISTS CONSTANT PLS_INTEGER := -20015;
|
||||
MSG_COLUMN_NOT_EXISTS VARCHAR2(4000) := 'Column does not exist in table';
|
||||
PRAGMA EXCEPTION_INIT( ERR_COLUMN_NOT_EXISTS
|
||||
,CODE_COLUMN_NOT_EXISTS);
|
||||
|
||||
ERR_UNSUPPORTED_DATA_TYPE EXCEPTION;
|
||||
CODE_UNSUPPORTED_DATA_TYPE CONSTANT PLS_INTEGER := -20016;
|
||||
MSG_UNSUPPORTED_DATA_TYPE VARCHAR2(4000) := 'Unsupported data type';
|
||||
PRAGMA EXCEPTION_INIT( ERR_UNSUPPORTED_DATA_TYPE
|
||||
,CODE_UNSUPPORTED_DATA_TYPE);
|
||||
|
||||
ERR_MISSING_SOURCE_KEY EXCEPTION;
|
||||
CODE_MISSING_SOURCE_KEY CONSTANT PLS_INTEGER := -20017;
|
||||
MSG_MISSING_SOURCE_KEY VARCHAR2(4000) := 'The Source was not found in parent table A_SOURCE';
|
||||
PRAGMA EXCEPTION_INIT( ERR_MISSING_SOURCE_KEY
|
||||
,CODE_MISSING_SOURCE_KEY);
|
||||
|
||||
ERR_NULL_SOURCE_FILE_CONFIG_KEY EXCEPTION;
|
||||
CODE_NULL_SOURCE_FILE_CONFIG_KEY CONSTANT PLS_INTEGER := -20018;
|
||||
MSG_NULL_SOURCE_FILE_CONFIG_KEY VARCHAR2(4000) := 'No entry in A_SOURCE_FILE_CONFIG table for specified A_SOURCE_FILE_CONFIG_KEY';
|
||||
PRAGMA EXCEPTION_INIT( ERR_NULL_SOURCE_FILE_CONFIG_KEY
|
||||
,CODE_NULL_SOURCE_FILE_CONFIG_KEY);
|
||||
|
||||
ERR_DUPLICATED_SOURCE_KEY EXCEPTION;
|
||||
CODE_DUPLICATED_SOURCE_KEY CONSTANT PLS_INTEGER := -20019;
|
||||
MSG_DUPLICATED_SOURCE_KEY VARCHAR2(4000) := 'The Source already exists in the A_SOURCE table';
|
||||
PRAGMA EXCEPTION_INIT( ERR_DUPLICATED_SOURCE_KEY
|
||||
,CODE_DUPLICATED_SOURCE_KEY);
|
||||
|
||||
ERR_MISSING_CONTAINER_CONFIG EXCEPTION;
|
||||
CODE_MISSING_CONTAINER_CONFIG CONSTANT PLS_INTEGER := -20020;
|
||||
MSG_MISSING_CONTAINER_CONFIG VARCHAR2(4000) := 'No match in A_SOURCE_FILE_CONFIG table where SOURCE_FILE_TYPE=''CONTAINER'' and specified SOURCE_FILE_ID';
|
||||
PRAGMA EXCEPTION_INIT( ERR_MISSING_CONTAINER_CONFIG
|
||||
,CODE_MISSING_CONTAINER_CONFIG);
|
||||
|
||||
ERR_MULTIPLE_CONTAINER_ENTRIES EXCEPTION;
|
||||
CODE_MULTIPLE_CONTAINER_ENTRIES CONSTANT PLS_INTEGER := -20021;
|
||||
MSG_MULTIPLE_CONTAINER_ENTRIES VARCHAR2(4000) := 'Multiple matches in A_SOURCE_FILE_CONFIG table where SOURCE_FILE_TYPE=''CONTAINER'' and specified SOURCE_FILE_ID';
|
||||
PRAGMA EXCEPTION_INIT( ERR_MULTIPLE_CONTAINER_ENTRIES
|
||||
,CODE_MULTIPLE_CONTAINER_ENTRIES);
|
||||
|
||||
ERR_WRONG_DESTINATION_PARAM EXCEPTION;
|
||||
CODE_WRONG_DESTINATION_PARAM CONSTANT PLS_INTEGER := -20022;
|
||||
MSG_WRONG_DESTINATION_PARAM VARCHAR2(4000) := 'Wrong destination parameter provided.';
|
||||
PRAGMA EXCEPTION_INIT( ERR_WRONG_DESTINATION_PARAM
|
||||
,CODE_WRONG_DESTINATION_PARAM);
|
||||
|
||||
ERR_FILE_NOT_EXISTS_ON_CLOUD EXCEPTION;
|
||||
CODE_FILE_NOT_EXISTS_ON_CLOUD CONSTANT PLS_INTEGER := -20023;
|
||||
MSG_FILE_NOT_EXISTS_ON_CLOUD VARCHAR2(4000) := 'File not exists on cloud.';
|
||||
PRAGMA EXCEPTION_INIT( ERR_FILE_NOT_EXISTS_ON_CLOUD
|
||||
,CODE_FILE_NOT_EXISTS_ON_CLOUD);
|
||||
|
||||
ERR_FILE_ALREADY_REGISTERED EXCEPTION;
|
||||
CODE_FILE_ALREADY_REGISTERED CONSTANT PLS_INTEGER := -20024;
|
||||
MSG_FILE_ALREADY_REGISTERED VARCHAR2(4000) := 'File already registered in A_SOURCE_FILE_RECEIVED table.';
|
||||
PRAGMA EXCEPTION_INIT( ERR_FILE_ALREADY_REGISTERED
|
||||
,CODE_FILE_ALREADY_REGISTERED);
|
||||
|
||||
ERR_WRONG_DATE_TIMESTAMP_FORMAT EXCEPTION;
|
||||
CODE_WRONG_DATE_TIMESTAMP_FORMAT CONSTANT PLS_INTEGER := -20025;
|
||||
MSG_WRONG_DATE_TIMESTAMP_FORMAT VARCHAR2(4000) := 'Provided DATE or TIMESTAMP format has errors (possible duplicated codes, ex: ''DD'').';
|
||||
PRAGMA EXCEPTION_INIT( ERR_WRONG_DATE_TIMESTAMP_FORMAT
|
||||
,CODE_WRONG_DATE_TIMESTAMP_FORMAT);
|
||||
|
||||
ERR_ENVIRONMENT_NOT_SET EXCEPTION;
|
||||
CODE_ENVIRONMENT_NOT_SET CONSTANT PLS_INTEGER := -20026;
|
||||
MSG_ENVIRONMENT_NOT_SET VARCHAR2(4000) := 'EnvironmentID not set'
|
||||
||cgBL||' Information about environment is needed to get proper configuration values.'
|
||||
||cgBL||' It can be set up in two different ways:'
|
||||
||cgBL||' 1. Set it on session level: execute DBMS_SESSION.SET_IDENTIFIER (client_id => ''dev'')'
|
||||
||cgBL||' 2. Set it on configuration level: Insert into CT_MRDS.A_FILE_MANAGER_CONFIG (ENVIRONMENT_ID,CONFIG_VARIABLE,CONFIG_VARIABLE_VALUE) values (''default'',''environment_id'',''dev'')'
|
||||
||cgBL||' Session level setup (1.) takes precedence over configuration level one (2.)'
|
||||
;
|
||||
PRAGMA EXCEPTION_INIT( ERR_ENVIRONMENT_NOT_SET
|
||||
,CODE_ENVIRONMENT_NOT_SET);
|
||||
|
||||
|
||||
ERR_CONFIG_VARIABLE_NOT_SET EXCEPTION;
|
||||
CODE_CONFIG_VARIABLE_NOT_SET CONSTANT PLS_INTEGER := -20027;
|
||||
MSG_CONFIG_VARIABLE_NOT_SET VARCHAR2(4000) := 'Missing configuration value in A_FILE_MANAGER_CONFIG';
|
||||
PRAGMA EXCEPTION_INIT( ERR_CONFIG_VARIABLE_NOT_SET
|
||||
,CODE_CONFIG_VARIABLE_NOT_SET);
|
||||
|
||||
ERR_NOT_INPUT_SOURCE_FILE_TYPE EXCEPTION;
|
||||
CODE_NOT_INPUT_SOURCE_FILE_TYPE CONSTANT PLS_INTEGER := -20028;
|
||||
MSG_NOT_INPUT_SOURCE_FILE_TYPE VARCHAR2(4000) := 'Archival can be executed only for A_SOURCE_FILE_CONFIG_KEY where SOURCE_FILE_TYPE=''INPUT''';
|
||||
PRAGMA EXCEPTION_INIT( ERR_NOT_INPUT_SOURCE_FILE_TYPE
|
||||
,CODE_NOT_INPUT_SOURCE_FILE_TYPE);
|
||||
|
||||
ERR_EXP_DATA_FOR_ARCH_FAILED EXCEPTION;
|
||||
CODE_EXP_DATA_FOR_ARCH_FAILED CONSTANT PLS_INTEGER := -20029;
|
||||
MSG_EXP_DATA_FOR_ARCH_FAILED VARCHAR2(4000) := 'Export data for archival failed.';
|
||||
PRAGMA EXCEPTION_INIT( ERR_EXP_DATA_FOR_ARCH_FAILED
|
||||
,CODE_EXP_DATA_FOR_ARCH_FAILED);
|
||||
|
||||
ERR_RESTORE_FILE_FROM_TRASH EXCEPTION;
|
||||
CODE_RESTORE_FILE_FROM_TRASH CONSTANT PLS_INTEGER := -20030;
|
||||
MSG_RESTORE_FILE_FROM_TRASH VARCHAR2(4000) := 'Unexpected issues occured while archival process. Restoration of exported files failed.';
|
||||
PRAGMA EXCEPTION_INIT( ERR_RESTORE_FILE_FROM_TRASH
|
||||
,CODE_RESTORE_FILE_FROM_TRASH);
|
||||
|
||||
ERR_CHANGE_STAT_TO_ARCHIVED_FAILED EXCEPTION;
|
||||
CODE_CHANGE_STAT_TO_ARCHIVED_FAILED CONSTANT PLS_INTEGER := -20031;
|
||||
MSG_CHANGE_STAT_TO_ARCHIVED_FAILED VARCHAR2(4000) := 'Failed to change file status to: ARCHIVED in A_SOURCE_FILE_RECEIVED table.';
|
||||
PRAGMA EXCEPTION_INIT( ERR_CHANGE_STAT_TO_ARCHIVED_FAILED
|
||||
,CODE_CHANGE_STAT_TO_ARCHIVED_FAILED);
|
||||
|
||||
----------------------------------------------------------------------------------------------------
-- Error triplets: each error is declared as an EXCEPTION, a numeric code constant
-- (for RAISE_APPLICATION_ERROR), and a default message. PRAGMA EXCEPTION_INIT binds
-- the exception to its code so handlers can catch errors raised anywhere by code.
----------------------------------------------------------------------------------------------------

ERR_MOVE_FILE_TO_TRASH_FAILED       EXCEPTION;
CODE_MOVE_FILE_TO_TRASH_FAILED      CONSTANT PLS_INTEGER := -20032;
MSG_MOVE_FILE_TO_TRASH_FAILED       VARCHAR2(4000) := 'FAILED to move file to TRASH before DROPPING it.';
PRAGMA EXCEPTION_INIT( ERR_MOVE_FILE_TO_TRASH_FAILED
                      ,CODE_MOVE_FILE_TO_TRASH_FAILED);

ERR_DROP_EXPORTED_FILES_FAILED      EXCEPTION;
CODE_DROP_EXPORTED_FILES_FAILED     CONSTANT PLS_INTEGER := -20033;
-- FIX: message previously duplicated the -20032 text ('FAILED to move file to
-- TRASH before DROPPING it.') by copy-paste; it now describes this error.
MSG_DROP_EXPORTED_FILES_FAILED      VARCHAR2(4000) := 'FAILED to DROP exported files.';
PRAGMA EXCEPTION_INIT( ERR_DROP_EXPORTED_FILES_FAILED
                      ,CODE_DROP_EXPORTED_FILES_FAILED);

ERR_INVALID_BUCKET_AREA             EXCEPTION;
CODE_INVALID_BUCKET_AREA            CONSTANT PLS_INTEGER := -20034;
MSG_INVALID_BUCKET_AREA             VARCHAR2(4000) := 'Invalid bucket area specified. Valid values: INBOX, ODS, DATA, ARCHIVE';
PRAGMA EXCEPTION_INIT( ERR_INVALID_BUCKET_AREA
                      ,CODE_INVALID_BUCKET_AREA);

ERR_INVALID_PARALLEL_DEGREE         EXCEPTION;
CODE_INVALID_PARALLEL_DEGREE        CONSTANT PLS_INTEGER := -20110;
MSG_INVALID_PARALLEL_DEGREE         VARCHAR2(4000) := 'Invalid parallel degree parameter. Must be between 1 and 16';
PRAGMA EXCEPTION_INIT( ERR_INVALID_PARALLEL_DEGREE
                      ,CODE_INVALID_PARALLEL_DEGREE);

ERR_PARALLEL_EXECUTION_FAILED       EXCEPTION;
CODE_PARALLEL_EXECUTION_FAILED      CONSTANT PLS_INTEGER := -20111;
MSG_PARALLEL_EXECUTION_FAILED       VARCHAR2(4000) := 'Parallel execution failed';
PRAGMA EXCEPTION_INIT( ERR_PARALLEL_EXECUTION_FAILED
                      ,CODE_PARALLEL_EXECUTION_FAILED);

ERR_UNKNOWN                         EXCEPTION;
CODE_UNKNOWN                        CONSTANT PLS_INTEGER := -20999;
-- FIX: typo 'Occured' -> 'Occurred'.
MSG_UNKNOWN                         VARCHAR2(4000) := 'Unknown Error Occurred';
PRAGMA EXCEPTION_INIT( ERR_UNKNOWN
                      ,CODE_UNKNOWN);
|
||||
|
||||
---------------------------------------------------------------------------------------------------------------------------
|
||||
---------------------------------------------------------------------------------------------------------------------------
|
||||
|
||||
|
||||
|
||||
|
||||
/**
 * @name LOG_PROCESS_EVENT
 * @desc Insert a new log record into A_PROCESS_LOG table.
 *       Also outputs to console if gvConsoleLoggingEnabled = 'ON'.
 *       Respects logging level configuration (gvMinLogLevel).
 * @param pLogMessage  - Message text to log
 * @param pLogLevel    - Severity level label (default 'ERROR')
 * @param pParameters  - Caller's formatted parameters for context (optional)
 * @param pProcessName - Name of the calling process/package (default 'FILE_MANAGER')
 * @example ENV_MANAGER.LOG_PROCESS_EVENT('Process completed successfully', 'INFO', 'pParam1=value1');
 * @ex_rslt Record inserted into A_PROCESS_LOG table and optionally displayed in console output
 **/
PROCEDURE LOG_PROCESS_EVENT (
     pLogMessage     VARCHAR2
    ,pLogLevel       VARCHAR2 DEFAULT 'ERROR'
    ,pParameters     VARCHAR2 DEFAULT NULL
    ,pProcessName    VARCHAR2 DEFAULT 'FILE_MANAGER'
);

/**
 * @name LOG_PROCESS_ERROR
 * @desc Insert a detailed error record into A_PROCESS_LOG table with full stack trace,
 *       backtrace, and call stack. This procedure captures comprehensive error
 *       information for debugging purposes while allowing clean user-facing error
 *       messages to be raised separately.
 * @param pLogMessage  - Base error message description
 * @param pParameters  - Procedure parameters for context
 * @param pProcessName - Name of the calling process/package (default 'FILE_MANAGER')
 * @ex_rslt Record inserted into A_PROCESS_LOG table with complete error stack information
 */
PROCEDURE LOG_PROCESS_ERROR (
     pLogMessage     VARCHAR2
    ,pParameters     VARCHAR2 DEFAULT NULL
    ,pProcessName    VARCHAR2 DEFAULT 'FILE_MANAGER'
);

/**
 * @name INIT_ERRORS
 * @desc Loads data into the Errors array.
 *       The Errors array is a list of Record(Error_Code, Error_Message) indexed by Error_Code.
 *       Called automatically during package initialization.
 * @example Called automatically when package is first referenced
 * @ex_rslt Errors array populated with all error codes and messages
 **/
PROCEDURE INIT_ERRORS;
|
||||
|
||||
|
||||
|
||||
/**
 * @name GET_DEFAULT_ENV
 * @desc Returns the name of the default environment.
 *       The returned string is an A_FILE_MANAGER_CONFIG.ENVIRONMENT_ID value.
 * @example select ENV_MANAGER.GET_DEFAULT_ENV() from dual;
 * @ex_rslt dev
 **/
FUNCTION GET_DEFAULT_ENV
RETURN VARCHAR2;

/**
 * @name INIT_VARIABLES
 * @desc For the specified pEnv parameter (A_FILE_MANAGER_CONFIG.ENVIRONMENT_ID),
 *       assigns values to the following global package variables:
 *       - gvNameSpace
 *       - gvRegion
 *       - gvCredentialName
 *       - gvInboxBucketName
 *       - gvDataBucketName
 *       - gvArchiveBucketName
 *       - gvInboxBucketUri
 *       - gvDataBucketUri
 *       - gvArchiveBucketUri
 *       - gvLoggingEnabled
 *       - gvMinLogLevel
 *       - gvDefaultDateFormat
 *       - gvConsoleLoggingEnabled
 * @param pEnv - Environment identifier whose configuration should be loaded
 **/
PROCEDURE INIT_VARIABLES(
    pEnv VARCHAR2
);

/**
 * @name GET_ERROR_MESSAGE
 * @desc Returns the error message for the specified pCode (Error_Code).
 *       The message is taken from the Errors array loaded by the INIT_ERRORS procedure.
 * @param pCode - Oracle application error code (e.g. -20009)
 * @example select ENV_MANAGER.GET_ERROR_MESSAGE(pCode => -20009) from dual;
 * @ex_rslt File not found on the cloud
 **/
FUNCTION GET_ERROR_MESSAGE(
    pCode PLS_INTEGER
) RETURN VARCHAR2;

/**
 * @name GET_ERROR_STACK
 * @desc Returns a string with all available error stack information
 *       (error message, error stack, error backtrace) for the specified code.
 *       The error message is taken from the Errors array loaded by INIT_ERRORS.
 * @param pFormat                - Output format selector ('OUTPUT' or 'TABLE' are used by callers)
 * @param pCode                  - Oracle error code to report
 * @param pSourceFileReceivedKey - Optional key of the related A_SOURCE_FILE_RECEIVED row
 * @example
 *   select ENV_MANAGER.GET_ERROR_STACK(
 *            pFormat => 'OUTPUT'
 *           ,pCode => -20009
 *           ,pSourceFileReceivedKey => NULL)
 *   from dual
 * @ex_rslt
 *   ------------------------------------------------------+
 *   Error Message:
 *   ORA-0000: normal, successful completion
 *   -------------------------------------------------------
 *   Error Stack:
 *   -------------------------------------------------------
 *   Error Backtrace:
 *   ------------------------------------------------------+
 **/
FUNCTION GET_ERROR_STACK(
     pFormat                VARCHAR2
    ,pCode                  PLS_INTEGER
    ,pSourceFileReceivedKey CT_MRDS.A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY%TYPE DEFAULT NULL
) RETURN VARCHAR2;

/**
 * @name FORMAT_PARAMETERS
 * @desc Formats a parameter list for logging purposes.
 *       Converts SYS.ODCIVARCHAR2LIST to a formatted string with proper NULL handling.
 * @param pParameterList - List of 'name=value' strings to format
 * @example select ENV_MANAGER.FORMAT_PARAMETERS(SYS.ODCIVARCHAR2LIST('param1=value1', 'param2=NULL')) from dual;
 * @ex_rslt param1=value1 ,
 *          param2=NULL
 **/
FUNCTION FORMAT_PARAMETERS(
    pParameterList SYS.ODCIVARCHAR2LIST
) RETURN VARCHAR2;
|
||||
|
||||
/**
 * @name ANALYZE_VALIDATION_ERRORS
 * @desc Analyzes CSV validation errors and generates a detailed diagnostic report.
 *       Compares the CSV structure with the template table and provides specific
 *       error analysis, including suggested solutions for common validation issues.
 * @param pValidationLogTable - Name of validation log table (e.g., VALIDATE$242_LOG)
 * @param pTemplateSchema     - Schema of template table (e.g., CT_ET_TEMPLATES)
 * @param pTemplateTable      - Name of template table (e.g., MOCK_PROC_TABLE)
 * @param pCsvFileUri         - URI of the CSV file being validated
 * @example SELECT ENV_MANAGER.ANALYZE_VALIDATION_ERRORS('VALIDATE$242_LOG', 'CT_ET_TEMPLATES', 'MOCK_PROC_TABLE', 'https://...') FROM DUAL;
 * @ex_rslt Detailed validation analysis report with column mismatches and solutions
 **/
FUNCTION ANALYZE_VALIDATION_ERRORS(
    pValidationLogTable VARCHAR2,
    pTemplateSchema     VARCHAR2,
    pTemplateTable      VARCHAR2,
    pCsvFileUri         VARCHAR2
) RETURN VARCHAR2;
|
||||
|
||||
---------------------------------------------------------------------------------------------------------------------------
|
||||
-- PACKAGE VERSION MANAGEMENT FUNCTIONS
|
||||
---------------------------------------------------------------------------------------------------------------------------
|
||||
|
||||
/**
 * @name GET_VERSION
 * @desc Returns the current version number of the ENV_MANAGER package.
 *       Uses semantic versioning format (MAJOR.MINOR.PATCH).
 * @example SELECT ENV_MANAGER.GET_VERSION() FROM DUAL;
 * @ex_rslt 3.0.0
 **/
FUNCTION GET_VERSION RETURN VARCHAR2;

/**
 * @name GET_BUILD_INFO
 * @desc Returns comprehensive build information including version, build date, and author.
 *       Formatted for display in logs or monitoring systems.
 * @example SELECT ENV_MANAGER.GET_BUILD_INFO() FROM DUAL;
 * @ex_rslt Package: ENV_MANAGER
 *          Version: 3.0.0
 *          Build Date: 2025-10-22 16:00:00
 *          Author: Grzegorz Michalski
 **/
FUNCTION GET_BUILD_INFO RETURN VARCHAR2;

/**
 * @name GET_VERSION_HISTORY
 * @desc Returns the complete version history with all releases and changes.
 *       Shows the evolution of package features over time.
 * @example SELECT ENV_MANAGER.GET_VERSION_HISTORY() FROM DUAL;
 * @ex_rslt ENV_MANAGER Version History:
 *          3.0.0 (2025-10-22): Added package versioning system...
 *          2.1.0 (2025-10-15): Added ANALYZE_VALIDATION_ERRORS function...
 **/
FUNCTION GET_VERSION_HISTORY RETURN VARCHAR2;

/**
 * @name GET_PACKAGE_VERSION_INFO
 * @desc Universal function to get formatted version information for any package.
 *       This centralized function is used by all packages in the system.
 * @param pPackageName - Name of the package
 * @param pVersion     - Version string (MAJOR.MINOR.PATCH format)
 * @param pBuildDate   - Build date timestamp (as text)
 * @param pAuthor      - Package author name
 * @example SELECT ENV_MANAGER.GET_PACKAGE_VERSION_INFO('FILE_MANAGER', '2.1.0', '2025-10-22 15:00:00', 'Grzegorz Michalski') FROM DUAL;
 * @ex_rslt Package: FILE_MANAGER
 *          Version: 2.1.0
 *          Build Date: 2025-10-22 15:00:00
 *          Author: Grzegorz Michalski
 **/
FUNCTION GET_PACKAGE_VERSION_INFO(
    pPackageName VARCHAR2,
    pVersion     VARCHAR2,
    pBuildDate   VARCHAR2,
    pAuthor      VARCHAR2
) RETURN VARCHAR2;

/**
 * @name FORMAT_VERSION_HISTORY
 * @desc Universal function to format version history for any package.
 *       Adds a package name header and proper formatting.
 * @param pPackageName    - Name of the package
 * @param pVersionHistory - Complete version history text
 * @example SELECT ENV_MANAGER.FORMAT_VERSION_HISTORY('FILE_MANAGER', '2.1.0 (2025-10-22): Export procedures...') FROM DUAL;
 * @ex_rslt FILE_MANAGER Version History:
 *          2.1.0 (2025-10-22): Export procedures...
 **/
FUNCTION FORMAT_VERSION_HISTORY(
    pPackageName    VARCHAR2,
    pVersionHistory VARCHAR2
) RETURN VARCHAR2;
|
||||
|
||||
---------------------------------------------------------------------------------------------------------------------------
|
||||
-- PACKAGE HASH + CHANGE DETECTION FUNCTIONS
|
||||
---------------------------------------------------------------------------------------------------------------------------
|
||||
|
||||
/**
 * @name CALCULATE_PACKAGE_HASH
 * @desc Calculates SHA256 hash of package source code from ALL_SOURCE.
 *       Returns the hash for either the SPEC or the BODY (if it exists).
 *       Used for automatic change detection.
 * @param pPackageOwner - Schema owner of the package
 * @param pPackageName  - Name of the package
 * @param pPackageType  - Type of package code ('PACKAGE' for SPEC, 'PACKAGE BODY' for BODY)
 * @example SELECT ENV_MANAGER.CALCULATE_PACKAGE_HASH('CT_MRDS', 'FILE_MANAGER', 'PACKAGE') FROM DUAL;
 * @ex_rslt A7B3C5D9E8F1234567890ABCDEF... (64-character SHA256 hash)
 **/
FUNCTION CALCULATE_PACKAGE_HASH(
    pPackageOwner VARCHAR2,
    pPackageName  VARCHAR2,
    pPackageType  VARCHAR2 -- 'PACKAGE' or 'PACKAGE BODY'
) RETURN VARCHAR2;

/**
 * @name TRACK_PACKAGE_VERSION
 * @desc Records package version and source code hash in A_PACKAGE_VERSION_TRACKING table.
 *       Automatically detects if source code changed without a version update.
 *       Should be called after every package deployment.
 * @param pPackageOwner     - Schema owner of the package
 * @param pPackageName      - Name of the package
 * @param pPackageVersion   - Current version from PACKAGE_VERSION constant
 * @param pPackageBuildDate - Build date from PACKAGE_BUILD_DATE constant
 * @param pPackageAuthor    - Author from PACKAGE_AUTHOR constant
 * @example EXEC ENV_MANAGER.TRACK_PACKAGE_VERSION('CT_MRDS', 'FILE_MANAGER', '3.2.0', '2025-10-22 16:30:00', 'Grzegorz Michalski');
 * @ex_rslt Record inserted into A_PACKAGE_VERSION_TRACKING with change detection status
 **/
PROCEDURE TRACK_PACKAGE_VERSION(
    pPackageOwner     VARCHAR2,
    pPackageName      VARCHAR2,
    pPackageVersion   VARCHAR2,
    pPackageBuildDate VARCHAR2,
    pPackageAuthor    VARCHAR2
);

/**
 * @name CHECK_PACKAGE_CHANGES
 * @desc Checks if package source code has changed since last tracking.
 *       Compares the current hash with the last recorded hash in A_PACKAGE_VERSION_TRACKING.
 *       Returns a detailed change detection report.
 * @param pPackageOwner - Schema owner of the package
 * @param pPackageName  - Name of the package
 * @example SELECT ENV_MANAGER.CHECK_PACKAGE_CHANGES('CT_MRDS', 'FILE_MANAGER') FROM DUAL;
 * @ex_rslt WARNING: Package changed without version update!
 *          Last Version: 3.2.0
 *          Current Hash (SPEC): A7B3C5D9...
 *          Last Hash (SPEC): B8C4D6E0...
 *          RECOMMENDATION: Update PACKAGE_VERSION and PACKAGE_BUILD_DATE
 **/
FUNCTION CHECK_PACKAGE_CHANGES(
    pPackageOwner VARCHAR2,
    pPackageName  VARCHAR2
) RETURN VARCHAR2;

/**
 * @name GET_PACKAGE_HASH_INFO
 * @desc Returns formatted information about package hash and tracking history.
 *       Includes current hash, last tracked hash, and change detection status.
 * @param pPackageOwner - Schema owner of the package
 * @param pPackageName  - Name of the package
 * @example SELECT ENV_MANAGER.GET_PACKAGE_HASH_INFO('CT_MRDS', 'FILE_MANAGER') FROM DUAL;
 * @ex_rslt Package: CT_MRDS.FILE_MANAGER
 *          Current Version: 3.2.0
 *          Current Hash (SPEC): A7B3C5D9...
 *          Last Tracked: 2025-10-22 16:30:00
 *          Status: OK - No changes detected
 **/
FUNCTION GET_PACKAGE_HASH_INFO(
    pPackageOwner VARCHAR2,
    pPackageName  VARCHAR2
) RETURN VARCHAR2;
|
||||
|
||||
END ENV_MANAGER;
|
||||
/
|
||||
@@ -0,0 +1,85 @@
|
||||
-- ===================================================================
-- MARS-835-PREHOOK ROLLBACK SCRIPT: Parallel Processing + Smart Column Mapping
-- ===================================================================
-- Purpose: Rollback for MARS-835-PREHOOK - Restore previous package versions
-- Author: Grzegorz Michalski
-- Date: 2026-01-09
-- Version: ENV_MANAGER 3.1.0, DATA_EXPORTER 2.1.0 (rollback targets)

-- Dynamic spool file generation (using SYS_CONTEXT - no DBA privileges required)
-- IMPORTANT: Ensure log/ directory exists before SPOOL (use host mkdir)
-- NOTE(review): '2>nul' is Windows cmd redirection; on Unix shells this would
-- create a file literally named '2>nul' -- confirm the target client platform.
host mkdir log 2>nul

-- Build the spool file name inside the database session (bind variable), then
-- surface it to SQL*Plus substitution via COLUMN ... NEW_VALUE so SPOOL can use it.
var filename VARCHAR2(100)
BEGIN
    :filename := 'log/ROLLBACK_MARS_835_PREHOOK_' || SYS_CONTEXT('USERENV', 'CON_NAME') || '_' || TO_CHAR(SYSDATE,'YYYYMMDD_HH24MISS') || '.log';
END;
/
column filename new_value _filename
select :filename filename from dual;
spool &_filename

SET ECHO OFF
SET TIMING ON
SET SERVEROUTPUT ON SIZE UNLIMITED
SET PAUSE OFF

PROMPT =========================================================================
PROMPT MARS-835-PREHOOK: Rollback to Previous Versions
PROMPT =========================================================================
PROMPT WARNING: This will reverse all changes from MARS-835-PREHOOK installation!
PROMPT - Removes A_PARALLEL_EXPORT_CHUNKS table
PROMPT - Restores ENV_MANAGER v3.1.0 (removes parallel error codes)
PROMPT - Restores DATA_EXPORTER v2.1.0 (removes parallel + Smart Column Mapping)
PROMPT =========================================================================

-- Confirm rollback with user.
-- WHENEVER SQLERROR EXIT makes the RAISE_APPLICATION_ERROR below abort the whole
-- script (exit code = SQLCODE); it is reset to CONTINUE right after the gate so
-- later step failures do not silently terminate the spool.
ACCEPT continue CHAR PROMPT 'Type YES to continue with rollback, or Ctrl+C to abort: '
WHENEVER SQLERROR EXIT SQL.SQLCODE
BEGIN
    -- Empty input substitutes to '' (NULL in Oracle), so both checks below abort.
    IF '&continue' IS NULL OR TRIM('&continue') IS NULL OR UPPER(TRIM('&continue')) != 'YES' THEN
        RAISE_APPLICATION_ERROR(-20999, 'Rollback aborted by user.');
    END IF;
END;
/
WHENEVER SQLERROR CONTINUE

-- Execute rollback scripts in reverse order of installation
PROMPT
PROMPT =========================================================================
PROMPT Step 0: Drop A_PARALLEL_EXPORT_CHUNKS Table
PROMPT =========================================================================
@@90_MARS_835_PREHOOK_rollback_PARALLEL_CHUNKS_TABLE.sql

PROMPT
PROMPT =========================================================================
PROMPT Step 1: Rollback ENV_MANAGER Package
PROMPT =========================================================================
@@91_MARS_835_PREHOOK_rollback_ENV_MANAGER.sql

PROMPT
PROMPT =========================================================================
PROMPT Step 2: Rollback DATA_EXPORTER Package
PROMPT =========================================================================
@@92_MARS_835_PREHOOK_rollback_DATA_EXPORTER.sql

PROMPT
PROMPT =========================================================================
PROMPT Step 3: Track Rollback Version
PROMPT =========================================================================
@@track_package_versions.sql

PROMPT
PROMPT =========================================================================
PROMPT Step 4: Verify Package Versions After Rollback
PROMPT =========================================================================
@@verify_packages_version.sql

PROMPT
PROMPT =========================================================================
PROMPT MARS-835-PREHOOK Rollback - COMPLETED
PROMPT =========================================================================

spool off

quit;
|
||||
@@ -0,0 +1,708 @@
|
||||
create or replace PACKAGE BODY CT_MRDS.DATA_EXPORTER
|
||||
AS
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
----------------------------------------------------------------------------------------------------
-- EXPORT_TABLE_DATA
--
-- Exports <pSchemaName>.<pTableName> to OCI Object Storage as CSV, one file per
-- distinct value of pKeyColumnName. Files are written under the bucket resolved
-- from pBucketArea (FILE_MANAGER.GET_BUCKET_URI), inside optional pFolderName,
-- named <sanitized key value>.csv.
--
-- Parameters:
--   pSchemaName     - Owner of the source table
--   pTableName      - Source table name
--   pKeyColumnName  - Column whose distinct values partition the export
--   pBucketArea     - Logical bucket area (resolved by FILE_MANAGER.GET_BUCKET_URI)
--   pFolderName     - Optional folder prefix inside the bucket (NULL = bucket root)
--   pCredentialName - DBMS_CLOUD credential (defaults to ENV_MANAGER.gvCredentialName)
--
-- Raises (via ENV_MANAGER codes): CODE_TABLE_NOT_EXISTS, CODE_COLUMN_NOT_EXISTS,
--   CODE_UNSUPPORTED_DATA_TYPE, CODE_UNKNOWN for any other failure.
--
-- FIXES vs previous revision:
--   * String/timestamp key values are now quote-escaped before being concatenated
--     into the dynamic WHERE clause: a value containing a single quote previously
--     broke the query (and was a data-driven SQL injection vector).
--   * NULL key values are skipped: 'col = ''''' never matches in Oracle, so the
--     old code exported an empty, uselessly named file for them.
--   * vTableName widened to hold "schema.table" (two 128-byte identifiers).
----------------------------------------------------------------------------------------------------
PROCEDURE EXPORT_TABLE_DATA (
    pSchemaName     IN VARCHAR2,
    pTableName      IN VARCHAR2,
    pKeyColumnName  IN VARCHAR2,
    pBucketArea     IN VARCHAR2,
    pFolderName     IN VARCHAR2,
    pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
)
IS
    -- Type definition for key values
    TYPE key_value_tab IS TABLE OF VARCHAR2(4000);
    vKeyValues     key_value_tab;
    vCount         INTEGER;
    vSql           VARCHAR2(4000);
    vKeyValue      VARCHAR2(4000);
    vQuery         VARCHAR2(32767);
    vUri           VARCHAR2(4000);
    vDataType      VARCHAR2(30);
    vTableName     VARCHAR2(261);  -- holds "schema.table" after qualification
    vSchemaName    VARCHAR2(128);
    vKeyColumnName VARCHAR2(128);
    vParameters    VARCHAR2(4000);
    vBucketUri     VARCHAR2(4000);

    -- Replace any character outside [a-zA-Z0-9._-] with '_' so the key value is
    -- safe to use as an object-storage file name.
    FUNCTION sanitizeFilename(pFilename IN VARCHAR2) RETURN VARCHAR2 IS
    BEGIN
        RETURN REGEXP_REPLACE(pFilename, '[^a-zA-Z0-9._-]', '_');
    END sanitizeFilename;

    -- Double embedded single quotes so a data value can be concatenated into the
    -- export query as a string literal without breaking it or injecting SQL.
    FUNCTION quoteLiteral(pValue IN VARCHAR2) RETURN VARCHAR2 IS
    BEGIN
        RETURN REPLACE(pValue, '''', '''''');
    END quoteLiteral;

BEGIN
    vParameters := ENV_MANAGER.FORMAT_PARAMETERS(SYS.ODCIVARCHAR2LIST( 'pSchemaName => '''||nvl(pSchemaName, 'NULL')||''''
                                                                      ,'pTableName => '''||nvl(pTableName, 'NULL')||''''
                                                                      ,'pKeyColumnName => '''||nvl(pKeyColumnName, 'NULL')||''''
                                                                      ,'pBucketArea => '''||nvl(pBucketArea, 'NULL')||''''
                                                                      ,'pFolderName => '''||nvl(pFolderName, 'NULL')||''''
                                                                      ,'pCredentialName => '''||nvl(pCredentialName, 'NULL')||''''
                                                                      ));
    ENV_MANAGER.LOG_PROCESS_EVENT('Start','INFO', vParameters);

    -- Get bucket URI based on bucket area using FILE_MANAGER function
    vBucketUri := FILE_MANAGER.GET_BUCKET_URI(pBucketArea);

    -- Convert table and column names to uppercase to match data dictionary
    vTableName     := UPPER(pTableName);
    vSchemaName    := UPPER(pSchemaName);
    vKeyColumnName := UPPER(pKeyColumnName);

    -- Check if table exists
    SELECT COUNT(*) INTO vCount
    FROM all_tables
    WHERE table_name = vTableName
      AND owner = vSchemaName;

    IF vCount = 0 THEN
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_TABLE_NOT_EXISTS, ENV_MANAGER.MSG_TABLE_NOT_EXISTS);
    END IF;

    -- Check if key column exists
    SELECT COUNT(*) INTO vCount
    FROM all_tab_columns
    WHERE table_name = vTableName
      AND column_name = vKeyColumnName
      AND owner = vSchemaName;

    IF vCount = 0 THEN
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, ENV_MANAGER.MSG_COLUMN_NOT_EXISTS);
    END IF;

    -- Get the data type of the key column (drives literal formatting below)
    SELECT data_type INTO vDataType
    FROM all_tab_columns
    WHERE table_name = vTableName
      AND column_name = vKeyColumnName
      AND owner = vSchemaName;

    -- Validate identifiers via DBMS_ASSERT before any dynamic SQL
    vTableName := DBMS_ASSERT.SCHEMA_NAME(vSchemaName) || '.' || DBMS_ASSERT.simple_sql_name(vTableName);

    -- Fetch unique key values
    vSql := 'SELECT DISTINCT ' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) ||
            ' FROM ' || vTableName;
    EXECUTE IMMEDIATE vSql BULK COLLECT INTO vKeyValues;

    -- Loop over each unique key value, exporting one CSV per value
    FOR i IN 1 .. vKeyValues.COUNT LOOP
        vKeyValue := vKeyValues(i);

        -- A NULL key can never satisfy an equality predicate ('' is NULL in
        -- Oracle), so exporting it would only create an empty file; skip it.
        CONTINUE WHEN vKeyValue IS NULL;

        -- Construct the query to extract data for the current key value;
        -- literal formatting depends on the key column's data type.
        IF vDataType IN ('VARCHAR2', 'CHAR', 'NCHAR', 'NVARCHAR2') THEN
            vQuery := 'SELECT * FROM ' || vTableName ||
                      ' WHERE ' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = ' || CHR(39) || quoteLiteral(vKeyValue) || CHR(39);
        ELSIF vDataType IN ('NUMBER', 'FLOAT', 'BINARY_FLOAT', 'BINARY_DOUBLE') THEN
            vQuery := 'SELECT * FROM ' || vTableName ||
                      ' WHERE ' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = ' || vKeyValue;
        ELSIF vDataType LIKE 'TIMESTAMP%' OR vDataType = 'DATE' THEN
            vQuery := 'SELECT * FROM ' || vTableName ||
                      ' WHERE ' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) ||
                      ' = TO_TIMESTAMP(' || CHR(39) || quoteLiteral(vKeyValue) || CHR(39) ||', ''YYYY-MM-DD HH24:MI:SS.FF'')';
        ELSE
            RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_UNSUPPORTED_DATA_TYPE, ENV_MANAGER.MSG_UNSUPPORTED_DATA_TYPE);
        END IF;

        -- Construct the URI for the file in OCI Object Storage
        vUri := vBucketUri ||
                CASE WHEN pFolderName IS NOT NULL THEN pFolderName || '/' ELSE '' END ||
                sanitizeFilename(vKeyValue) || '.csv';

        -- Use DBMS_CLOUD package to export data to the URI
        DBMS_CLOUD.EXPORT_DATA(
            credential_name => pCredentialName,
            file_uri_list   => vUri,
            query           => vQuery,
            format          => json_object('type' VALUE 'CSV', 'header' VALUE true)
        );
    END LOOP;
    ENV_MANAGER.LOG_PROCESS_EVENT('End','INFO',vParameters);
EXCEPTION
    WHEN ENV_MANAGER.ERR_TABLE_NOT_EXISTS THEN
        vgMsgTmp := ENV_MANAGER.MSG_TABLE_NOT_EXISTS ||': '||vTableName;
        ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_TABLE_NOT_EXISTS, vgMsgTmp);
    WHEN ENV_MANAGER.ERR_COLUMN_NOT_EXISTS THEN
        vgMsgTmp := ENV_MANAGER.MSG_COLUMN_NOT_EXISTS || ' (TableName.ColumnName): ' || vTableName||'.'||vKeyColumnName;
        ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, vgMsgTmp);
    WHEN ENV_MANAGER.ERR_UNSUPPORTED_DATA_TYPE THEN
        vgMsgTmp := ENV_MANAGER.MSG_UNSUPPORTED_DATA_TYPE || ' vDataType: '||vDataType;
        ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_UNSUPPORTED_DATA_TYPE, vgMsgTmp);
    WHEN OTHERS THEN
        -- Log complete error details including full stack trace and backtrace
        ENV_MANAGER.LOG_PROCESS_ERROR('Export failed: ' || SQLERRM, vParameters, 'DATA_EXPORTER');
        ENV_MANAGER.LOG_PROCESS_EVENT(ENV_MANAGER.GET_ERROR_STACK(pFormat => 'TABLE', pCode=> SQLCODE), 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_UNKNOWN, ENV_MANAGER.GET_ERROR_STACK(pFormat => 'OUTPUT', pCode=> SQLCODE));

END EXPORT_TABLE_DATA;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
----------------------------------------------------------------------------------------------------
-- EXPORT_TABLE_DATA_BY_DATE
--
-- Exports rows of <pSchemaName>.<pTableName> joined to CT_ODS.A_LOAD_HISTORY (on
-- pKeyColumnName = A_WORKFLOW_HISTORY_KEY) to OCI Object Storage as Parquet,
-- Hive-partitioned by load year/month:
--   <bucket>/<pFolderName>/PARTITION_YEAR=YYYY/PARTITION_MONTH=MM/YYYYMM.parquet
-- Only loads with LOAD_START in [pMinDate, pMaxDate) are exported. pColumnList,
-- when given, restricts the exported columns (each name is validated against the
-- table and prefixed with the 'T.' alias).
--
-- Raises (via ENV_MANAGER codes): CODE_TABLE_NOT_EXISTS, CODE_COLUMN_NOT_EXISTS,
--   CODE_UNKNOWN for any other failure.
--
-- FIX vs previous revision: the column-validation DECLARE block re-declared
-- vCurrentCol, shadowing the outer variable read by the ERR_COLUMN_NOT_EXISTS
-- handler; the handler's "... in pColumnList" suffix could therefore never
-- appear. The inner declaration (and an unused vColumnName) are removed so the
-- handler reports the offending column.
----------------------------------------------------------------------------------------------------
PROCEDURE EXPORT_TABLE_DATA_BY_DATE (
    pSchemaName     IN VARCHAR2,
    pTableName      IN VARCHAR2,
    pKeyColumnName  IN VARCHAR2,
    pBucketArea     IN VARCHAR2,
    pFolderName     IN VARCHAR2,
    pColumnList     IN VARCHAR2 default NULL,
    pMinDate        IN DATE default DATE '1900-01-01',
    pMaxDate        IN DATE default SYSDATE,
    pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
)
IS
    -- Type definition for key values
    TYPE key_value_tab IS TABLE OF VARCHAR2(4000);

    vKeyValuesYear  key_value_tab;
    vKeyValuesMonth key_value_tab;

    vCount               INTEGER;
    vSql                 VARCHAR2(32000);
    vKeyValueYear        VARCHAR2(4000);
    vKeyValueMonth       VARCHAR2(4000);
    vQuery               VARCHAR2(32767);
    vUri                 VARCHAR2(4000);
    vDataType            VARCHAR2(30);
    vTableName           VARCHAR2(261);  -- holds "schema.table" after qualification
    vSchemaName          VARCHAR2(128);
    vKeyColumnName       VARCHAR2(128);
    vParameters          CT_MRDS.A_PROCESS_LOG.PROCEDURE_PARAMETERS%TYPE;
    vProcessedColumnList VARCHAR2(32767);
    vBucketUri           VARCHAR2(4000);
    -- Tracks the column currently being validated from pColumnList; read by the
    -- ERR_COLUMN_NOT_EXISTS handler to name the offending column.
    vCurrentCol          VARCHAR2(128);

    -- Replace any character outside [a-zA-Z0-9._-] with '_' so the value is safe
    -- to use inside an object-storage path component.
    FUNCTION sanitizeFilename(pFilename IN VARCHAR2) RETURN VARCHAR2 IS
    BEGIN
        RETURN REGEXP_REPLACE(pFilename, '[^a-zA-Z0-9._-]', '_');
    END sanitizeFilename;

    -- Add the 'T.' table-alias prefix to every column in a comma-separated list;
    -- returns 'T.*' for a NULL list. Columns already qualified are left as-is.
    FUNCTION addTablePrefix(pColumnList IN VARCHAR2) RETURN VARCHAR2 IS
        vResult  VARCHAR2(32767);
        vColumns VARCHAR2(32767);
        vPos     PLS_INTEGER;
        vNextPos PLS_INTEGER;
        vCol     VARCHAR2(128);
    BEGIN
        IF pColumnList IS NULL THEN
            RETURN 'T.*';
        END IF;

        -- Remove extra spaces and convert to uppercase
        vColumns := UPPER(REPLACE(pColumnList, ' ', ''));
        vPos     := 1;
        vResult  := '';

        -- Parse comma-separated column list and add T. prefix
        WHILE vPos <= LENGTH(vColumns) LOOP
            vNextPos := INSTR(vColumns, ',', vPos);
            IF vNextPos = 0 THEN
                vNextPos := LENGTH(vColumns) + 1;
            END IF;

            vCol := SUBSTR(vColumns, vPos, vNextPos - vPos);

            -- Add T. prefix if not already present
            IF INSTR(vCol, '.') = 0 THEN
                vCol := 'T.' || vCol;
            END IF;

            -- Add to result with comma separator
            IF vResult IS NOT NULL THEN
                vResult := vResult || ', ';
            END IF;
            vResult := vResult || vCol;

            vPos := vNextPos + 1;
        END LOOP;

        RETURN vResult;
    END addTablePrefix;

BEGIN
    vParameters := ENV_MANAGER.FORMAT_PARAMETERS(SYS.ODCIVARCHAR2LIST( 'pSchemaName => '''||nvl(pSchemaName, 'NULL')||''''
                                                                      ,'pTableName => '''||nvl(pTableName, 'NULL')||''''
                                                                      ,'pKeyColumnName => '''||nvl(pKeyColumnName, 'NULL')||''''
                                                                      ,'pBucketArea => '''||nvl(pBucketArea, 'NULL')||''''
                                                                      ,'pFolderName => '''||nvl(pFolderName, 'NULL')||''''
                                                                      ,'pColumnList => '''||nvl(pColumnList, 'NULL')||''''
                                                                      ,'pMinDate => '''||nvl(TO_CHAR(pMinDate, 'YYYY-MM-DD HH24:MI:SS'), 'NULL')||''''
                                                                      ,'pMaxDate => '''||nvl(TO_CHAR(pMaxDate, 'YYYY-MM-DD HH24:MI:SS'), 'NULL')||''''
                                                                      ,'pCredentialName => '''||nvl(pCredentialName, 'NULL')||''''
                                                                      ));
    ENV_MANAGER.LOG_PROCESS_EVENT('Start','INFO', vParameters);

    -- Get bucket URI based on bucket area using FILE_MANAGER function
    vBucketUri := FILE_MANAGER.GET_BUCKET_URI(pBucketArea);

    -- Convert table and column names to uppercase to match data dictionary
    vTableName     := UPPER(pTableName);
    vSchemaName    := UPPER(pSchemaName);
    vKeyColumnName := UPPER(pKeyColumnName);

    -- Check if table exists
    SELECT COUNT(*) INTO vCount
    FROM all_tables
    WHERE table_name = vTableName
      AND owner = vSchemaName;

    IF vCount = 0 THEN
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_TABLE_NOT_EXISTS, ENV_MANAGER.MSG_TABLE_NOT_EXISTS);
    END IF;

    -- Check if key column exists (vCurrentCol is still NULL here, so the
    -- handler reports only table.key-column for this raise)
    SELECT COUNT(*) INTO vCount
    FROM all_tab_columns
    WHERE table_name = vTableName
      AND column_name = vKeyColumnName
      AND owner = vSchemaName;

    IF vCount = 0 THEN
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, ENV_MANAGER.MSG_COLUMN_NOT_EXISTS);
    END IF;

    -- Validate pColumnList - check that every named column exists in the table
    IF pColumnList IS NOT NULL THEN
        DECLARE
            vColumns VARCHAR2(32767);
            vPos     PLS_INTEGER;
            vNextPos PLS_INTEGER;
            -- NOTE: no local vCurrentCol here; the OUTER vCurrentCol is assigned
            -- so the ERR_COLUMN_NOT_EXISTS handler can report which column failed.
        BEGIN
            -- Remove spaces and convert to uppercase for processing
            vColumns := UPPER(REPLACE(pColumnList, ' ', ''));
            vPos     := 1;

            -- Parse comma-separated column list
            WHILE vPos <= LENGTH(vColumns) LOOP
                vNextPos := INSTR(vColumns, ',', vPos);
                IF vNextPos = 0 THEN
                    vNextPos := LENGTH(vColumns) + 1;
                END IF;

                vCurrentCol := SUBSTR(vColumns, vPos, vNextPos - vPos);

                -- Remove table alias prefix if present (e.g., 'T.COLUMN_NAME' -> 'COLUMN_NAME')
                IF INSTR(vCurrentCol, '.') > 0 THEN
                    vCurrentCol := SUBSTR(vCurrentCol, INSTR(vCurrentCol, '.') + 1);
                END IF;

                -- Check if column exists in the table
                SELECT COUNT(*) INTO vCount
                FROM all_tab_columns
                WHERE table_name = vTableName
                  AND column_name = vCurrentCol
                  AND owner = vSchemaName;

                IF vCount = 0 THEN
                    RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, ENV_MANAGER.MSG_COLUMN_NOT_EXISTS);
                END IF;

                vPos := vNextPos + 1;
            END LOOP;
        END;
        -- All columns validated; clear the tracker so later errors do not
        -- misleadingly blame the last validated column.
        vCurrentCol := NULL;
    END IF;

    -- Process column list to add T. prefix to each column
    vProcessedColumnList := addTablePrefix(pColumnList);

    -- Validate identifiers via DBMS_ASSERT before any dynamic SQL
    vTableName := DBMS_ASSERT.SCHEMA_NAME(vSchemaName) || '.' || DBMS_ASSERT.simple_sql_name(vTableName);

    -- Fetch the distinct (year, month) partitions present in the date window
    vSql := 'SELECT DISTINCT TO_CHAR(L.LOAD_START,''YYYY'') AS YR, TO_CHAR(L.LOAD_START,''MM'') AS MN
               FROM ' || vTableName || ' T, CT_ODS.A_LOAD_HISTORY L
              WHERE T.' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = L.A_WORKFLOW_HISTORY_KEY
                AND L.LOAD_START >= :pMinDate
                AND L.LOAD_START <  :pMaxDate
            ' ;
    EXECUTE IMMEDIATE vSql BULK COLLECT INTO vKeyValuesYear, vKeyValuesMonth USING pMinDate, pMaxDate;

    -- Loop over each (year, month) partition and export it as one Parquet file
    FOR i IN 1 .. vKeyValuesYear.COUNT LOOP
        vKeyValueYear  := vKeyValuesYear(i);
        vKeyValueMonth := vKeyValuesMonth(i);

        -- Year/month come from TO_CHAR and are safe to embed as literals;
        -- the date bounds are embedded via fixed-format TO_DATE strings.
        vQuery := 'SELECT ' || vProcessedColumnList || '
                     FROM ' || vTableName || ' T, CT_ODS.A_LOAD_HISTORY L
                    WHERE T.' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = L.A_WORKFLOW_HISTORY_KEY
                      AND TO_CHAR(L.LOAD_START,''YYYY'') = ' || CHR(39) || vKeyValueYear || CHR(39) || '
                      AND TO_CHAR(L.LOAD_START,''MM'') = ' || CHR(39) || vKeyValueMonth || CHR(39) || '
                      AND L.LOAD_START >= TO_DATE(' || CHR(39) || TO_CHAR(pMinDate, 'YYYY-MM-DD HH24:MI:SS') || CHR(39) || ', ''YYYY-MM-DD HH24:MI:SS'')
                      AND L.LOAD_START <  TO_DATE(' || CHR(39) || TO_CHAR(pMaxDate, 'YYYY-MM-DD HH24:MI:SS') || CHR(39) || ', ''YYYY-MM-DD HH24:MI:SS'')';

        -- Construct the Hive-style partitioned URI for the file in OCI Object Storage
        vUri := vBucketUri ||
                CASE WHEN pFolderName IS NOT NULL THEN pFolderName || '/' ELSE '' END ||
                'PARTITION_YEAR=' || sanitizeFilename(vKeyValueYear) || '/' ||
                'PARTITION_MONTH=' || sanitizeFilename(vKeyValueMonth) || '/' ||
                sanitizeFilename(vKeyValueYear) || sanitizeFilename(vKeyValueMonth) || '.parquet';

        -- Use DBMS_CLOUD package to export data to the URI
        DBMS_CLOUD.EXPORT_DATA(
            credential_name => pCredentialName,
            file_uri_list   => vUri,
            query           => vQuery,
            format          => json_object('type' VALUE 'parquet')
        );
    END LOOP;

    ENV_MANAGER.LOG_PROCESS_EVENT('End','INFO',vParameters);
EXCEPTION
    WHEN ENV_MANAGER.ERR_TABLE_NOT_EXISTS THEN
        vgMsgTmp := ENV_MANAGER.MSG_TABLE_NOT_EXISTS ||': '||vTableName;
        ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_TABLE_NOT_EXISTS, vgMsgTmp);
    WHEN ENV_MANAGER.ERR_COLUMN_NOT_EXISTS THEN
        -- vCurrentCol is non-NULL only when the failure came from pColumnList validation
        vgMsgTmp := ENV_MANAGER.MSG_COLUMN_NOT_EXISTS || ' (TableName.ColumnName): ' || vTableName||'.'||vKeyColumnName||CASE WHEN vCurrentCol IS NOT NULL THEN '.'||vCurrentCol||' in pColumnList' ELSE '' END;
        ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, vgMsgTmp);
    WHEN OTHERS THEN
        -- Log complete error details including full stack trace and backtrace
        ENV_MANAGER.LOG_PROCESS_ERROR('Export failed: ' || SQLERRM, vParameters, 'DATA_EXPORTER');
        ENV_MANAGER.LOG_PROCESS_EVENT(ENV_MANAGER.GET_ERROR_STACK(pFormat => 'TABLE', pCode=> SQLCODE), 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_UNKNOWN, ENV_MANAGER.GET_ERROR_STACK(pFormat => 'OUTPUT', pCode=> SQLCODE));

END EXPORT_TABLE_DATA_BY_DATE;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
/**
 * @name EXPORT_TABLE_DATA_TO_CSV_BY_DATE
 * @desc Exports table data to CSV files on OCI Object Storage, filtered by date.
 *       NOTE(review): this procedure creates ONE CSV FILE PER YEAR/MONTH combination
 *       found in CT_ODS.A_LOAD_HISTORY within [pMinDate, pMaxDate) — see the
 *       FOR loop over vKeyValuesYear below — the same implicit partitioning as
 *       EXPORT_TABLE_DATA_BY_DATE, only in CSV format instead of Parquet.
 *       File naming pattern: {base}_YYYYMM{ext}, where base/ext come from pFileName
 *       (or default to TABLENAME / '.csv' when pFileName is NULL).
 *       Uses the same date filtering mechanism with CT_ODS.A_LOAD_HISTORY.
 *       Allows specifying custom column list or uses T.* if pColumnList is NULL.
 *       Validates that all columns in pColumnList exist in the target table.
 *       Automatically adds 'T.' prefix to column names in pColumnList.
 * @example
 * begin
 *   DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
 *     pSchemaName    => 'CT_MRDS',
 *     pTableName     => 'MY_TABLE',
 *     pKeyColumnName => 'A_WORKFLOW_HISTORY_KEY',
 *     pBucketArea    => 'DATA',
 *     pFolderName    => 'exports',
 *     pFileName      => 'my_export.csv',
 *     pColumnList    => 'COLUMN1, COLUMN2, COLUMN3', -- Optional
 *     pMinDate       => DATE '2024-01-01',
 *     pMaxDate       => SYSDATE
 *   );
 * end;
 **/
PROCEDURE EXPORT_TABLE_DATA_TO_CSV_BY_DATE (
    pSchemaName     IN VARCHAR2,
    pTableName      IN VARCHAR2,
    pKeyColumnName  IN VARCHAR2,                                  -- column joined to L.A_WORKFLOW_HISTORY_KEY
    pBucketArea     IN VARCHAR2,                                  -- resolved to a bucket URI by FILE_MANAGER.GET_BUCKET_URI
    pFolderName     IN VARCHAR2,                                  -- optional folder inside the bucket (NULL = bucket root)
    pFileName       IN VARCHAR2 DEFAULT NULL,                     -- base name (+extension) for output files; NULL = TABLENAME.csv
    pColumnList     IN VARCHAR2 default NULL,                     -- comma-separated columns; NULL = all columns (T.*)
    pMinDate        IN DATE default DATE '1900-01-01',            -- inclusive lower bound on L.LOAD_START
    pMaxDate        IN DATE default SYSDATE,                      -- exclusive upper bound on L.LOAD_START
    pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
)
IS
    -- Type definition for key values (year / month strings fetched via dynamic SQL)
    TYPE key_value_tab IS TABLE OF VARCHAR2(4000);

    vKeyValuesYear  key_value_tab;
    vKeyValuesMonth key_value_tab;

    vCount               INTEGER;                                 -- existence-check counter for dictionary lookups
    vSql                 VARCHAR2(4000);                          -- dynamic SQL: distinct year/month discovery
    vKeyValueYear        VARCHAR2(4000);
    vKeyValueMonth       VARCHAR2(4000);
    vQuery               VARCHAR2(32767);                         -- dynamic SQL passed to DBMS_CLOUD.EXPORT_DATA
    vUri                 VARCHAR2(4000);                          -- full object-storage URI of one output file
    vDataType            VARCHAR2(30);                            -- NOTE(review): fetched below but never used afterwards
    vTableName           VARCHAR2(128);
    vSchemaName          VARCHAR2(128);
    vKeyColumnName       VARCHAR2(128);
    vParameters          CT_MRDS.A_PROCESS_LOG.PROCEDURE_PARAMETERS%TYPE;  -- formatted call parameters for logging
    vFileBaseName        VARCHAR2(4000);
    vFileExtension       VARCHAR2(10);
    vProcessedColumnList VARCHAR2(32767);                         -- pColumnList with 'T.' prefixes applied
    vBucketUri           VARCHAR2(4000);
    vCurrentCol          VARCHAR2(128);                           -- last column validated; used in error message below

    -- Replaces any character outside [a-zA-Z0-9._-] with '_' so the value is safe
    -- for use inside an object-storage file name.
    FUNCTION sanitizeFilename(pFilename IN VARCHAR2) RETURN VARCHAR2 IS
        vFilename VARCHAR2(1000);
    BEGIN
        -- Replace any disallowed characters with underscores
        vFilename := REGEXP_REPLACE(pFilename, '[^a-zA-Z0-9._-]', '_');
        RETURN vFilename;
    END sanitizeFilename;

    -- Returns 'T.*' for a NULL list; otherwise uppercases the list, strips spaces,
    -- and prefixes every unqualified column with the 'T.' table alias.
    FUNCTION addTablePrefix(pColumnList IN VARCHAR2) RETURN VARCHAR2 IS
        vResult     VARCHAR2(32767);
        vColumns    VARCHAR2(32767);
        vPos        PLS_INTEGER;
        vNextPos    PLS_INTEGER;
        vCurrentCol VARCHAR2(128);
    BEGIN
        IF pColumnList IS NULL THEN
            RETURN 'T.*';
        END IF;

        -- Remove extra spaces and convert to uppercase
        vColumns := UPPER(REPLACE(pColumnList, ' ', ''));
        vPos := 1;
        vResult := '';

        -- Parse comma-separated column list and add T. prefix
        WHILE vPos <= LENGTH(vColumns) LOOP
            vNextPos := INSTR(vColumns, ',', vPos);
            IF vNextPos = 0 THEN
                vNextPos := LENGTH(vColumns) + 1;   -- last item: consume to end of string
            END IF;

            vCurrentCol := SUBSTR(vColumns, vPos, vNextPos - vPos);

            -- Add T. prefix if not already qualified with an alias/table prefix
            IF INSTR(vCurrentCol, '.') = 0 THEN
                vCurrentCol := 'T.' || vCurrentCol;
            END IF;

            -- Add to result with comma separator
            IF vResult IS NOT NULL THEN
                vResult := vResult || ', ';
            END IF;
            vResult := vResult || vCurrentCol;

            vPos := vNextPos + 1;
        END LOOP;

        RETURN vResult;
    END addTablePrefix;

BEGIN
    -- Capture all call parameters for process logging / audit.
    vParameters := ENV_MANAGER.FORMAT_PARAMETERS(SYS.ODCIVARCHAR2LIST( 'pSchemaName => '''||nvl(pSchemaName, 'NULL')||''''
                                                                      ,'pTableName => '''||nvl(pTableName, 'NULL')||''''
                                                                      ,'pKeyColumnName => '''||nvl(pKeyColumnName, 'NULL')||''''
                                                                      ,'pBucketArea => '''||nvl(pBucketArea, 'NULL')||''''
                                                                      ,'pFolderName => '''||nvl(pFolderName, 'NULL')||''''
                                                                      ,'pFileName => '''||nvl(pFileName, 'NULL')||''''
                                                                      ,'pColumnList => '''||nvl(pColumnList, 'NULL')||''''
                                                                      ,'pMinDate => '''||nvl(TO_CHAR(pMinDate, 'YYYY-MM-DD HH24:MI:SS'), 'NULL')||''''
                                                                      ,'pMaxDate => '''||nvl(TO_CHAR(pMaxDate, 'YYYY-MM-DD HH24:MI:SS'), 'NULL')||''''
                                                                      ,'pCredentialName => '''||nvl(pCredentialName, 'NULL')||''''
                                                                      ));
    ENV_MANAGER.LOG_PROCESS_EVENT('Start','INFO', vParameters);

    -- Get bucket URI based on bucket area using FILE_MANAGER function
    vBucketUri := FILE_MANAGER.GET_BUCKET_URI(pBucketArea);

    -- Convert table and column names to uppercase to match data dictionary
    vTableName := UPPER(pTableName);
    vSchemaName := UPPER(pSchemaName);
    vKeyColumnName := UPPER(pKeyColumnName);

    -- Extract base filename and extension or construct default filename
    IF pFileName IS NOT NULL THEN
        -- Use provided filename; split on the LAST '.' so base names may contain dots
        IF INSTR(pFileName, '.') > 0 THEN
            vFileBaseName := SUBSTR(pFileName, 1, INSTR(pFileName, '.', -1) - 1);
            vFileExtension := SUBSTR(pFileName, INSTR(pFileName, '.', -1));
        ELSE
            vFileBaseName := pFileName;
            vFileExtension := '.csv';
        END IF;
    ELSE
        -- Construct default filename: TABLENAME.csv (without date range)
        vFileBaseName := UPPER(pTableName);
        vFileExtension := '.csv';
    END IF;

    -- Check if table exists
    SELECT COUNT(*) INTO vCount
      FROM all_tables
     WHERE table_name = vTableName
       AND owner = vSchemaName;

    IF vCount = 0 THEN
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_TABLE_NOT_EXISTS, ENV_MANAGER.MSG_TABLE_NOT_EXISTS);
    END IF;

    -- Check if key column exists
    SELECT COUNT(*) INTO vCount
      FROM all_tab_columns
     WHERE table_name = vTableName
       AND column_name = vKeyColumnName
       AND owner = vSchemaName;

    IF vCount = 0 THEN
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, ENV_MANAGER.MSG_COLUMN_NOT_EXISTS);
    END IF;

    -- Validate pColumnList - check if all column names exist in the table
    IF pColumnList IS NOT NULL THEN
        DECLARE
            vColumnName VARCHAR2(128);      -- NOTE(review): declared but unused
            vColumns    VARCHAR2(32767);
            vPos        PLS_INTEGER;
            vNextPos    PLS_INTEGER;
            vCurrentCol VARCHAR2(128);      -- shadows the outer vCurrentCol used in the exception handler
        BEGIN
            -- Remove spaces and convert to uppercase for processing
            vColumns := UPPER(REPLACE(pColumnList, ' ', ''));
            vPos := 1;

            -- Parse comma-separated column list
            WHILE vPos <= LENGTH(vColumns) LOOP
                vNextPos := INSTR(vColumns, ',', vPos);
                IF vNextPos = 0 THEN
                    vNextPos := LENGTH(vColumns) + 1;
                END IF;

                vCurrentCol := SUBSTR(vColumns, vPos, vNextPos - vPos);

                -- Remove table alias prefix if present (e.g., 'T.COLUMN_NAME' -> 'COLUMN_NAME')
                IF INSTR(vCurrentCol, '.') > 0 THEN
                    vCurrentCol := SUBSTR(vCurrentCol, INSTR(vCurrentCol, '.') + 1);
                END IF;

                -- Check if column exists in the table
                SELECT COUNT(*) INTO vCount
                  FROM all_tab_columns
                 WHERE table_name = vTableName
                   AND column_name = vCurrentCol
                   AND owner = vSchemaName;

                IF vCount = 0 THEN
                    RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, ENV_MANAGER.MSG_COLUMN_NOT_EXISTS);
                END IF;

                vPos := vNextPos + 1;
            END LOOP;
        END;
    END IF;

    -- Process column list to add T. prefix to each column
    vProcessedColumnList := addTablePrefix(pColumnList);

    -- Get the data type of the key column
    -- NOTE(review): vDataType is never referenced after this SELECT - confirm intent
    SELECT data_type INTO vDataType
      FROM all_tab_columns
     WHERE table_name = vTableName
       AND column_name = vKeyColumnName
       AND owner = vSchemaName;

    -- From here on vTableName is the fully-qualified, DBMS_ASSERT-validated name
    -- (guards the dynamic SQL below against identifier injection).
    vTableName := DBMS_ASSERT.SCHEMA_NAME(vSchemaName) || '.' || DBMS_ASSERT.simple_sql_name(vTableName);

    -- Fetch unique year/month combinations (bind variables for the date bounds)
    vSql := 'SELECT DISTINCT TO_CHAR(L.LOAD_START,''YYYY'') AS YR, TO_CHAR(L.LOAD_START,''MM'') AS MN
               FROM ' || vTableName || ' T, CT_ODS.A_LOAD_HISTORY L
              WHERE T.' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = L.A_WORKFLOW_HISTORY_KEY
                AND L.LOAD_START >= :pMinDate
                AND L.LOAD_START < :pMaxDate
            ' ;
    EXECUTE IMMEDIATE vSql BULK COLLECT INTO vKeyValuesYear, vKeyValuesMonth USING pMinDate, pMaxDate;

    ENV_MANAGER.LOG_PROCESS_EVENT('Found ' || vKeyValuesYear.COUNT || ' year/month combinations to export', 'INFO', vParameters);

    -- Loop over each unique year/month combination: one CSV file per iteration
    FOR i IN 1 .. vKeyValuesYear.COUNT LOOP
        vKeyValueYear := vKeyValuesYear(i);
        vKeyValueMonth := vKeyValuesMonth(i);

        -- Construct the query to extract data for the current year/month.
        -- The date bounds are inlined as literals here (DBMS_CLOUD.EXPORT_DATA
        -- takes a plain query string; no binds are available).
        vQuery := 'SELECT ' || vProcessedColumnList || '
                     FROM ' || vTableName || ' T, CT_ODS.A_LOAD_HISTORY L
                    WHERE T.' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = L.A_WORKFLOW_HISTORY_KEY
                      AND TO_CHAR(L.LOAD_START,''YYYY'') = ' || CHR(39) || vKeyValueYear || CHR(39) || '
                      AND TO_CHAR(L.LOAD_START,''MM'') = ' || CHR(39) || vKeyValueMonth || CHR(39) || '
                      AND L.LOAD_START >= TO_DATE(' || CHR(39) || TO_CHAR(pMinDate, 'YYYY-MM-DD HH24:MI:SS') || CHR(39) || ', ''YYYY-MM-DD HH24:MI:SS'')
                      AND L.LOAD_START < TO_DATE(' || CHR(39) || TO_CHAR(pMaxDate, 'YYYY-MM-DD HH24:MI:SS') || CHR(39) || ', ''YYYY-MM-DD HH24:MI:SS'')';

        -- Construct the URI for the CSV file in OCI Object Storage:
        -- {bucket}[{folder}/]{base}_{YYYY}{MM}{ext}
        vUri := vBucketUri ||
                CASE WHEN pFolderName IS NOT NULL THEN pFolderName || '/' ELSE '' END ||
                sanitizeFilename(vFileBaseName) || '_' ||
                sanitizeFilename(vKeyValueYear) || sanitizeFilename(vKeyValueMonth) ||
                vFileExtension;

        ENV_MANAGER.LOG_PROCESS_EVENT('Exporting to CSV file: ' || vUri, 'INFO', vParameters);
        ENV_MANAGER.LOG_PROCESS_EVENT('Year/Month: ' || vKeyValueYear || '/' || vKeyValueMonth, 'DEBUG', vParameters);

        -- Use DBMS_CLOUD package to export data to CSV file (with header row)
        DBMS_CLOUD.EXPORT_DATA(
            credential_name => pCredentialName,
            file_uri_list   => vUri,
            query           => vQuery,
            format          => json_object('type' VALUE 'CSV', 'header' VALUE true)
        );
    END LOOP;

    ENV_MANAGER.LOG_PROCESS_EVENT('Export completed successfully for ' || vKeyValuesYear.COUNT || ' files', 'INFO', vParameters);
    ENV_MANAGER.LOG_PROCESS_EVENT('End','INFO',vParameters);

EXCEPTION
    WHEN ENV_MANAGER.ERR_TABLE_NOT_EXISTS THEN
        vgMsgTmp := ENV_MANAGER.MSG_TABLE_NOT_EXISTS ||': '||vTableName;
        ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_TABLE_NOT_EXISTS, vgMsgTmp);
    WHEN ENV_MANAGER.ERR_COLUMN_NOT_EXISTS THEN
        -- NOTE(review): the inner DECLARE block writes its own vCurrentCol, so the
        -- outer vCurrentCol referenced here is always NULL in the pColumnList path.
        vgMsgTmp := ENV_MANAGER.MSG_COLUMN_NOT_EXISTS || ' (TableName.ColumnName): ' || vTableName||'.'||vKeyColumnName||CASE WHEN vCurrentCol IS NOT NULL THEN '.'||vCurrentCol||' in pColumnList' ELSE '' END;
        ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, vgMsgTmp);
    WHEN OTHERS THEN
        -- Log complete error details including full stack trace and backtrace
        ENV_MANAGER.LOG_PROCESS_ERROR('Export failed: ' || SQLERRM, vParameters, 'DATA_EXPORTER');
        ENV_MANAGER.LOG_PROCESS_EVENT(ENV_MANAGER.GET_ERROR_STACK(pFormat => 'TABLE', pCode=> SQLCODE), 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_UNKNOWN, ENV_MANAGER.GET_ERROR_STACK(pFormat => 'OUTPUT', pCode=> SQLCODE));

END EXPORT_TABLE_DATA_TO_CSV_BY_DATE;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
-- VERSION MANAGEMENT FUNCTIONS
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
/**
 * @name GET_VERSION
 * @desc Returns the package's semantic version string (X.Y.Z), as declared
 *       by the PACKAGE_VERSION constant in the package specification.
 **/
FUNCTION GET_VERSION RETURN VARCHAR2 IS
    vVersion VARCHAR2(10);
BEGIN
    -- Single source of truth: the spec-level constant.
    vVersion := PACKAGE_VERSION;
    RETURN vVersion;
END GET_VERSION;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
/**
 * @name GET_BUILD_INFO
 * @desc Returns formatted build information (version, build date, author)
 *       by delegating to the central ENV_MANAGER formatter.
 **/
FUNCTION GET_BUILD_INFO RETURN VARCHAR2 IS
    vBuildInfo VARCHAR2(4000);
BEGIN
    -- ENV_MANAGER centralises version-info formatting for all MRDS packages.
    vBuildInfo := ENV_MANAGER.GET_PACKAGE_VERSION_INFO(
                      pPackageName => 'DATA_EXPORTER'
                     ,pVersion     => PACKAGE_VERSION
                     ,pBuildDate   => PACKAGE_BUILD_DATE
                     ,pAuthor      => PACKAGE_AUTHOR
                  );
    RETURN vBuildInfo;
END GET_BUILD_INFO;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
/**
 * @name GET_VERSION_HISTORY
 * @desc Returns the package's recent change history, formatted by the
 *       central ENV_MANAGER helper from the VERSION_HISTORY constant.
 **/
FUNCTION GET_VERSION_HISTORY RETURN VARCHAR2 IS
    vHistory VARCHAR2(4000);
BEGIN
    vHistory := ENV_MANAGER.FORMAT_VERSION_HISTORY(
                    pPackageName    => 'DATA_EXPORTER'
                   ,pVersionHistory => VERSION_HISTORY
                );
    RETURN vHistory;
END GET_VERSION_HISTORY;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
END;
|
||||
/
|
||||
@@ -0,0 +1,163 @@
|
||||
create or replace PACKAGE CT_MRDS.DATA_EXPORTER
AUTHID CURRENT_USER
AS
/**
 * Data Export Package: Provides comprehensive data export capabilities to various formats (CSV, Parquet)
 * with support for cloud storage integration via Oracle Cloud Infrastructure (OCI).
 * The structure of comment is used by GET_PACKAGE_DOCUMENTATION function
 * which returns documentation text for confluence page (to Copy-Paste it).
 **/

    -- Package Version Information
    -- v2.1.1: patch release deployed by MARS-826-PREHOOK (version/build-date bump only).
    -- NOTE(review): build timestamp assumed from deployment date - confirm against release record.
    PACKAGE_VERSION     CONSTANT VARCHAR2(10) := '2.1.1';
    PACKAGE_BUILD_DATE  CONSTANT VARCHAR2(19) := '2025-10-22 16:00:00';
    PACKAGE_AUTHOR      CONSTANT VARCHAR2(50) := 'MRDS Development Team';

    -- Version History (last 3-5 changes)
    VERSION_HISTORY     CONSTANT VARCHAR2(4000) :=
        'v2.1.1 (2025-10-22): Updated package version and build date' || CHR(10) ||
        'v2.1.0 (2025-10-22): Added version tracking and PARTITION_YEAR/PARTITION_MONTH support' || CHR(10) ||
        'v2.0.0 (2025-10-01): Separated export functionality from FILE_MANAGER package' || CHR(10) ||
        'v1.0.0 (2025-09-15): Initial implementation within FILE_MANAGER package' || CHR(10);

    -- Line break (CR+LF) used when composing multi-line messages
    cgBL     CONSTANT VARCHAR2(2) := CHR(13)||CHR(10);
    -- Scratch buffer for building error/log messages in the package body
    vgMsgTmp VARCHAR2(32000);

    ---------------------------------------------------------------------------------------------------------------------------
    ---------------------------------------------------------------------------------------------------------------------------

    /**
     * @name EXPORT_TABLE_DATA
     * @desc Wrapper procedure for DBMS_CLOUD.EXPORT_DATA.
     *       Exports data into CSV file on OCI infrastructure.
     *       pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
     * @example
     * begin
     *   DATA_EXPORTER.EXPORT_TABLE_DATA(
     *     pSchemaName    => 'CT_MRDS',
     *     pTableName     => 'MY_TABLE',
     *     pKeyColumnName => 'A_WORKFLOW_HISTORY_KEY',
     *     pBucketArea    => 'DATA',
     *     pFolderName    => 'csv_exports'
     *   );
     * end;
     **/
    PROCEDURE EXPORT_TABLE_DATA (
        pSchemaName     IN VARCHAR2,
        pTableName      IN VARCHAR2,
        pKeyColumnName  IN VARCHAR2,
        pBucketArea     IN VARCHAR2,
        pFolderName     IN VARCHAR2,
        pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
    );


    /**
     * @name EXPORT_TABLE_DATA_BY_DATE
     * @desc Wrapper procedure for DBMS_CLOUD.EXPORT_DATA.
     *       Exports data into PARQUET files on OCI infrastructure.
     *       Each YEAR_MONTH pair goes to separate file (implicit partitioning).
     *       Allows specifying custom column list or uses T.* if pColumnList is NULL.
     *       Validates that all columns in pColumnList exist in the target table.
     *       Automatically adds 'T.' prefix to column names in pColumnList.
     *       pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
     * @example
     * begin
     *   DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
     *     pSchemaName    => 'CT_MRDS',
     *     pTableName     => 'MY_TABLE',
     *     pKeyColumnName => 'A_WORKFLOW_HISTORY_KEY',
     *     pBucketArea    => 'DATA',
     *     pFolderName    => 'parquet_exports',
     *     pColumnList    => 'COLUMN1, COLUMN2, COLUMN3', -- Optional
     *     pMinDate       => DATE '2024-01-01',
     *     pMaxDate       => SYSDATE
     *   );
     * end;
     **/
    PROCEDURE EXPORT_TABLE_DATA_BY_DATE (
        pSchemaName     IN VARCHAR2,
        pTableName      IN VARCHAR2,
        pKeyColumnName  IN VARCHAR2,
        pBucketArea     IN VARCHAR2,
        pFolderName     IN VARCHAR2,
        pColumnList     IN VARCHAR2 default NULL,
        pMinDate        IN DATE default DATE '1900-01-01',
        pMaxDate        IN DATE default SYSDATE,
        pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
    );


    /**
     * @name EXPORT_TABLE_DATA_TO_CSV_BY_DATE
     * @desc Exports data to separate CSV files partitioned by year and month.
     *       Creates one CSV file for each year/month combination found in the data.
     *       Uses the same date filtering mechanism with CT_ODS.A_LOAD_HISTORY as EXPORT_TABLE_DATA_BY_DATE,
     *       but exports to CSV format instead of Parquet.
     *       File naming pattern: {pFileName}_YYYYMM.csv or {TABLENAME}_YYYYMM.csv (if pFileName is NULL)
     *       pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
     * @example
     * begin
     *   -- With custom filename
     *   DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
     *     pSchemaName    => 'CT_MRDS',
     *     pTableName     => 'MY_TABLE',
     *     pKeyColumnName => 'A_WORKFLOW_HISTORY_KEY',
     *     pBucketArea    => 'DATA',
     *     pFolderName    => 'exports',
     *     pFileName      => 'my_export.csv',
     *     pMinDate       => DATE '2024-01-01',
     *     pMaxDate       => SYSDATE
     *   );
     *
     *   -- With auto-generated filename (based on table name only)
     *   DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
     *     pSchemaName    => 'OU_TOP',
     *     pTableName     => 'AGGREGATED_ALLOTMENT',
     *     pKeyColumnName => 'A_WORKFLOW_HISTORY_KEY',
     *     pBucketArea    => 'ARCHIVE',
     *     pFolderName    => 'exports',
     *     pMinDate       => DATE '2025-09-01',
     *     pMaxDate       => DATE '2025-09-17'
     *   );
     *   -- This will create files like: AGGREGATED_ALLOTMENT_202509.csv, etc.
     * end;
     **/
    PROCEDURE EXPORT_TABLE_DATA_TO_CSV_BY_DATE (
        pSchemaName     IN VARCHAR2,
        pTableName      IN VARCHAR2,
        pKeyColumnName  IN VARCHAR2,
        pBucketArea     IN VARCHAR2,
        pFolderName     IN VARCHAR2,
        pFileName       IN VARCHAR2 DEFAULT NULL,
        pColumnList     IN VARCHAR2 default NULL,
        pMinDate        IN DATE default DATE '1900-01-01',
        pMaxDate        IN DATE default SYSDATE,
        pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
    );

    ---------------------------------------------------------------------------------------------------------------------------
    -- VERSION MANAGEMENT FUNCTIONS
    ---------------------------------------------------------------------------------------------------------------------------

    /**
     * Returns the current package version number
     * return: Version string in format X.Y.Z (e.g., '2.1.1')
     **/
    FUNCTION GET_VERSION RETURN VARCHAR2;

    /**
     * Returns comprehensive build information including version, date, and author
     * return: Formatted string with complete build details
     **/
    FUNCTION GET_BUILD_INFO RETURN VARCHAR2;

    /**
     * Returns the version history with recent changes
     * return: Multi-line string with version history
     **/
    FUNCTION GET_VERSION_HISTORY RETURN VARCHAR2;

END;
/
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,613 @@
|
||||
create or replace PACKAGE CT_MRDS.ENV_MANAGER
|
||||
AUTHID CURRENT_USER
|
||||
AS
|
||||
/**
|
||||
* General comment for package: Please put comments for functions and procedures as shown in below example.
|
||||
* It is a standard.
|
||||
* The structure of comment is used by GET_PACKAGE_DOCUMENTATION function
|
||||
* which returns documentation text for confluence page (to Copy-Paste it).
|
||||
**/
|
||||
|
||||
-- Example comment:
|
||||
/**
|
||||
* @name EX_PROCEDURE_NAME
|
||||
* @desc Procedure description
|
||||
* @example select ENV_MANAGER.EX_PROCEDURE_NAME(pParameter => 129) from dual;
|
||||
* @ex_rslt Example Result
|
||||
**/
|
||||
|
||||
-- Package Version Information (Semantic Versioning: MAJOR.MINOR.PATCH)
|
||||
PACKAGE_VERSION CONSTANT VARCHAR2(10) := '3.1.0';
|
||||
PACKAGE_BUILD_DATE CONSTANT VARCHAR2(20) := '2025-10-22 20:57:00';
|
||||
PACKAGE_AUTHOR CONSTANT VARCHAR2(100) := 'Grzegorz Michalski';
|
||||
|
||||
-- Version History (Latest changes first)
|
||||
VERSION_HISTORY CONSTANT VARCHAR2(4000) :=
|
||||
'3.1.0 (2025-10-22): Added package hash tracking and automatic change detection system (SHA256 hashing)' || CHR(13)||CHR(10) ||
|
||||
'3.0.0 (2025-10-22): Added package versioning system with centralized version management functions' || CHR(13)||CHR(10) ||
|
||||
'2.1.0 (2025-10-15): Added ANALYZE_VALIDATION_ERRORS function for comprehensive CSV validation analysis' || CHR(13)||CHR(10) ||
|
||||
'2.0.0 (2025-10-01): Added LOG_PROCESS_ERROR procedure with enhanced error diagnostics and stack traces' || CHR(13)||CHR(10) ||
|
||||
'1.5.0 (2025-09-20): Added console logging support with gvConsoleLoggingEnabled configuration' || CHR(13)||CHR(10) ||
|
||||
'1.0.0 (2025-09-01): Initial release with error management and configuration system';
|
||||
|
||||
TYPE Error_Record IS RECORD (
|
||||
code PLS_INTEGER,
|
||||
message VARCHAR2(4000)
|
||||
);
|
||||
|
||||
TYPE tErrorList IS TABLE OF Error_Record INDEX BY PLS_INTEGER;
|
||||
|
||||
Errors tErrorList;
|
||||
|
||||
|
||||
guid VARCHAR2(32);
|
||||
gvEnv VARCHAR2(200);
|
||||
gvUsername VARCHAR2(128);
|
||||
gvOsuser VARCHAR2(128);
|
||||
gvMachine VARCHAR2(64);
|
||||
gvModule VARCHAR2(64);
|
||||
|
||||
gvNameSpace VARCHAR2(200);
|
||||
gvRegion VARCHAR2(200);
|
||||
gvDataBucketName VARCHAR2(200);
|
||||
gvInboxBucketName VARCHAR2(200);
|
||||
gvArchiveBucketName VARCHAR2(200);
|
||||
gvDataBucketUri VARCHAR2(200);
|
||||
gvInboxBucketUri VARCHAR2(200);
|
||||
gvArchiveBucketUri VARCHAR2(200);
|
||||
gvCredentialName VARCHAR2(200);
|
||||
|
||||
-- Overwritten by variable "LoggingEnabled" in A_FILE_MANAGER_CONFIG.CONFIG_VARIABLE table
|
||||
gvLoggingEnabled VARCHAR2(3) := 'ON'; -- 'ON' or 'OFF'
|
||||
|
||||
-- Overwritten by variable "MinLogLevel" in A_FILE_MANAGER_CONFIG.CONFIG_VARIABLE table
|
||||
-- Possible values: DEBUG ,INFO ,WARNING ,ERROR
|
||||
gvMinLogLevel VARCHAR2(10) := 'DEBUG';
|
||||
|
||||
-- Overwritten by variable "DefaultDateFormat" in A_FILE_MANAGER_CONFIG.CONFIG_VARIABLE table
|
||||
gvDefaultDateFormat VARCHAR2(200) := 'DD/MM/YYYY HH24:MI:SS';
|
||||
|
||||
-- Overwritten by variable "ConsoleLoggingEnabled" in A_FILE_MANAGER_CONFIG.CONFIG_VARIABLE table
|
||||
gvConsoleLoggingEnabled VARCHAR2(3) := 'ON'; -- 'ON' or 'OFF'
|
||||
|
||||
cgBL CONSTANT VARCHAR2(2) := CHR(13)||CHR(10);
|
||||
|
||||
vgSourceFileConfigKey PLS_INTEGER;
|
||||
|
||||
vgMsgTmp VARCHAR2(32000);
|
||||
--Exceptions
|
||||
ERR_EMPTY_FILEURI_AND_RECKEY EXCEPTION;
|
||||
CODE_EMPTY_FILEURI_AND_RECKEY CONSTANT PLS_INTEGER := -20001;
|
||||
MSG_EMPTY_FILEURI_AND_RECKEY VARCHAR2(4000) := 'Either pFileUri or pSourceFileReceivedKey must be not null';
|
||||
PRAGMA EXCEPTION_INIT( ERR_EMPTY_FILEURI_AND_RECKEY
|
||||
,CODE_EMPTY_FILEURI_AND_RECKEY);
|
||||
|
||||
|
||||
ERR_NO_CONFIG_MATCH_FOR_FILEURI EXCEPTION;
|
||||
CODE_NO_CONFIG_MATCH_FOR_FILEURI CONSTANT PLS_INTEGER := -20002;
|
||||
MSG_NO_CONFIG_MATCH_FOR_FILEURI VARCHAR2(4000) := 'No match for source file in A_SOURCE_FILE_CONFIG table'
|
||||
||cgBL||' The file provided in parameter: pFileUri does not have '
|
||||
||cgBL||' coresponding configuration in A_SOURCE_FILE_CONFIG table';
|
||||
PRAGMA EXCEPTION_INIT( ERR_NO_CONFIG_MATCH_FOR_FILEURI
|
||||
,CODE_NO_CONFIG_MATCH_FOR_FILEURI);
|
||||
|
||||
ERR_MULTIPLE_MATCH_FOR_SRCFILE EXCEPTION;
|
||||
CODE_MULTIPLE_MATCH_FOR_SRCFILE CONSTANT PLS_INTEGER := -20003;
|
||||
MSG_MULTIPLE_MATCH_FOR_SRCFILE VARCHAR2(4000) := 'Multiple match for source file in A_SOURCE_FILE_CONFIG table';
|
||||
PRAGMA EXCEPTION_INIT( ERR_MULTIPLE_MATCH_FOR_SRCFILE
|
||||
,CODE_MULTIPLE_MATCH_FOR_SRCFILE);
|
||||
|
||||
ERR_MISSING_COLUMN_DATE_FORMAT EXCEPTION;
|
||||
CODE_MISSING_COLUMN_DATE_FORMAT CONSTANT PLS_INTEGER := -20004;
|
||||
MSG_MISSING_COLUMN_DATE_FORMAT VARCHAR2(4000) := 'Missing entry in config table: A_COLUMN_DATE_FORMAT primary key(TEMPLATE_TABLE_NAME, COLUMN_NAME)'
|
||||
||cgBL||' Remember: each column which data_type IN (''DATE'', ''TIMESTAMP'')'
|
||||
||cgBL||' should have DateFormat specified in A_COLUMN_DATE_FORMAT table '
|
||||
||cgBL||' for example: ''YYYY-MM-DD''';
|
||||
PRAGMA EXCEPTION_INIT( ERR_MISSING_COLUMN_DATE_FORMAT
|
||||
,CODE_MISSING_COLUMN_DATE_FORMAT);
|
||||
|
||||
ERR_MULTIPLE_COLUMN_DATE_FORMAT EXCEPTION;
|
||||
CODE_MULTIPLE_COLUMN_DATE_FORMAT CONSTANT PLS_INTEGER := -20005;
|
||||
MSG_MULTIPLE_COLUMN_DATE_FORMAT VARCHAR2(4000) := 'Multiple records for date format in A_COLUMN_DATE_FORMAT table'
|
||||
||cgBL||' There should be only one format specified for each DAT/TIMESTAMP column';
|
||||
PRAGMA EXCEPTION_INIT( ERR_MULTIPLE_COLUMN_DATE_FORMAT
|
||||
,CODE_MULTIPLE_COLUMN_DATE_FORMAT);
|
||||
|
||||
|
||||
ERR_DIDNT_GET_LOAD_OPERATION_ID EXCEPTION;
|
||||
CODE_DIDNT_GET_LOAD_OPERATION_ID CONSTANT PLS_INTEGER := -20006;
|
||||
MSG_DIDNT_GET_LOAD_OPERATION_ID VARCHAR2(4000) := 'Didnt get load operation id from external table validation';
|
||||
PRAGMA EXCEPTION_INIT( ERR_DIDNT_GET_LOAD_OPERATION_ID
|
||||
,CODE_DIDNT_GET_LOAD_OPERATION_ID);
|
||||
|
||||
ERR_NO_CONFIG_FOR_RECEIVED_FILE EXCEPTION;
|
||||
CODE_NO_CONFIG_FOR_RECEIVED_FILE CONSTANT PLS_INTEGER := -20007;
|
||||
MSG_NO_CONFIG_FOR_RECEIVED_FILE VARCHAR2(4000) := 'No match for received source file in A_SOURCE_FILE_CONFIG '
|
||||
||cgBL||' or missing data in A_SOURCE_FILE_RECEIVED table for provided pSourceFileReceivedKey parameter';
|
||||
PRAGMA EXCEPTION_INIT( ERR_NO_CONFIG_FOR_RECEIVED_FILE
|
||||
,CODE_NO_CONFIG_FOR_RECEIVED_FILE);
|
||||
|
||||
ERR_MULTI_CONFIG_FOR_RECEIVED_FILE EXCEPTION;
|
||||
CODE_MULTI_CONFIG_FOR_RECEIVED_FILE CONSTANT PLS_INTEGER := -20008;
|
||||
MSG_MULTI_CONFIG_FOR_RECEIVED_FILE VARCHAR2(4000) := 'Multiple matchs for received source file in A_SOURCE_FILE_CONFIG';
|
||||
PRAGMA EXCEPTION_INIT( ERR_MULTI_CONFIG_FOR_RECEIVED_FILE
|
||||
,CODE_MULTI_CONFIG_FOR_RECEIVED_FILE);
|
||||
|
||||
ERR_FILE_NOT_FOUND_ON_CLOUD EXCEPTION;
|
||||
CODE_FILE_NOT_FOUND_ON_CLOUD CONSTANT PLS_INTEGER := -20009;
|
||||
MSG_FILE_NOT_FOUND_ON_CLOUD VARCHAR2(4000) := 'File not found on the cloud';
|
||||
PRAGMA EXCEPTION_INIT( ERR_FILE_NOT_FOUND_ON_CLOUD
|
||||
,CODE_FILE_NOT_FOUND_ON_CLOUD);
|
||||
|
||||
ERR_FILE_VALIDATION_FAILED EXCEPTION;
|
||||
CODE_FILE_VALIDATION_FAILED CONSTANT PLS_INTEGER := -20010;
|
||||
MSG_FILE_VALIDATION_FAILED VARCHAR2(4000) := 'File validation failed';
|
||||
PRAGMA EXCEPTION_INIT( ERR_FILE_VALIDATION_FAILED
|
||||
,CODE_FILE_VALIDATION_FAILED);
|
||||
|
||||
ERR_EXCESS_COLUMNS_DETECTED EXCEPTION;
|
||||
CODE_EXCESS_COLUMNS_DETECTED CONSTANT PLS_INTEGER := -20011;
|
||||
MSG_EXCESS_COLUMNS_DETECTED VARCHAR2(4000) := 'CSV file contains more columns than template allows';
|
||||
-- Tail of the -20011 declaration group: exception declared above this chunk.
PRAGMA EXCEPTION_INIT( ERR_EXCESS_COLUMNS_DETECTED
                      ,CODE_EXCESS_COLUMNS_DETECTED);

-- Raised when no row in A_SOURCE_FILE_CONFIG matches the supplied parameters.
ERR_NO_CONFIG_MATCH                  EXCEPTION;
CODE_NO_CONFIG_MATCH                 CONSTANT PLS_INTEGER := -20012;
MSG_NO_CONFIG_MATCH                  VARCHAR2(4000) := 'No match for specified parameters in A_SOURCE_FILE_CONFIG table';
PRAGMA EXCEPTION_INIT( ERR_NO_CONFIG_MATCH
                      ,CODE_NO_CONFIG_MATCH);

-- Raised when a file-name prefix does not map to any configured source.
ERR_UNKNOWN_PREFIX                   EXCEPTION;
CODE_UNKNOWN_PREFIX                  CONSTANT PLS_INTEGER := -20013;
MSG_UNKNOWN_PREFIX                   VARCHAR2(4000) := 'Unknown prefix';
PRAGMA EXCEPTION_INIT( ERR_UNKNOWN_PREFIX
                      ,CODE_UNKNOWN_PREFIX);

-- Raised when a referenced table is absent from the data dictionary.
ERR_TABLE_NOT_EXISTS                 EXCEPTION;
CODE_TABLE_NOT_EXISTS                CONSTANT PLS_INTEGER := -20014;
MSG_TABLE_NOT_EXISTS                 VARCHAR2(4000) := 'Table does not exist';
PRAGMA EXCEPTION_INIT( ERR_TABLE_NOT_EXISTS
                      ,CODE_TABLE_NOT_EXISTS);

-- Raised when a referenced column is absent from the referenced table.
ERR_COLUMN_NOT_EXISTS                EXCEPTION;
CODE_COLUMN_NOT_EXISTS               CONSTANT PLS_INTEGER := -20015;
MSG_COLUMN_NOT_EXISTS                VARCHAR2(4000) := 'Column does not exist in table';
PRAGMA EXCEPTION_INIT( ERR_COLUMN_NOT_EXISTS
                      ,CODE_COLUMN_NOT_EXISTS);

-- Raised when a column's data type is not handled by the export/load logic.
ERR_UNSUPPORTED_DATA_TYPE            EXCEPTION;
CODE_UNSUPPORTED_DATA_TYPE           CONSTANT PLS_INTEGER := -20016;
MSG_UNSUPPORTED_DATA_TYPE            VARCHAR2(4000) := 'Unsupported data type';
PRAGMA EXCEPTION_INIT( ERR_UNSUPPORTED_DATA_TYPE
                      ,CODE_UNSUPPORTED_DATA_TYPE);

-- Raised when the parent A_SOURCE row for a given source key is missing.
ERR_MISSING_SOURCE_KEY               EXCEPTION;
CODE_MISSING_SOURCE_KEY              CONSTANT PLS_INTEGER := -20017;
MSG_MISSING_SOURCE_KEY               VARCHAR2(4000) := 'The Source was not found in parent table A_SOURCE';
PRAGMA EXCEPTION_INIT( ERR_MISSING_SOURCE_KEY
                      ,CODE_MISSING_SOURCE_KEY);

-- Raised when a supplied A_SOURCE_FILE_CONFIG_KEY resolves to no config row.
ERR_NULL_SOURCE_FILE_CONFIG_KEY      EXCEPTION;
CODE_NULL_SOURCE_FILE_CONFIG_KEY     CONSTANT PLS_INTEGER := -20018;
MSG_NULL_SOURCE_FILE_CONFIG_KEY      VARCHAR2(4000) := 'No entry in A_SOURCE_FILE_CONFIG table for specified A_SOURCE_FILE_CONFIG_KEY';
PRAGMA EXCEPTION_INIT( ERR_NULL_SOURCE_FILE_CONFIG_KEY
                      ,CODE_NULL_SOURCE_FILE_CONFIG_KEY);

-- Raised on attempts to register a source that already exists.
ERR_DUPLICATED_SOURCE_KEY            EXCEPTION;
CODE_DUPLICATED_SOURCE_KEY           CONSTANT PLS_INTEGER := -20019;
MSG_DUPLICATED_SOURCE_KEY            VARCHAR2(4000) := 'The Source already exists in the A_SOURCE table';
PRAGMA EXCEPTION_INIT( ERR_DUPLICATED_SOURCE_KEY
                      ,CODE_DUPLICATED_SOURCE_KEY);

-- Raised when no CONTAINER-type config row exists for a SOURCE_FILE_ID.
ERR_MISSING_CONTAINER_CONFIG         EXCEPTION;
CODE_MISSING_CONTAINER_CONFIG        CONSTANT PLS_INTEGER := -20020;
MSG_MISSING_CONTAINER_CONFIG         VARCHAR2(4000) := 'No match in A_SOURCE_FILE_CONFIG table where SOURCE_FILE_TYPE=''CONTAINER'' and specified SOURCE_FILE_ID';
PRAGMA EXCEPTION_INIT( ERR_MISSING_CONTAINER_CONFIG
                      ,CODE_MISSING_CONTAINER_CONFIG);

-- Raised when more than one CONTAINER-type config row matches a SOURCE_FILE_ID.
ERR_MULTIPLE_CONTAINER_ENTRIES       EXCEPTION;
CODE_MULTIPLE_CONTAINER_ENTRIES      CONSTANT PLS_INTEGER := -20021;
MSG_MULTIPLE_CONTAINER_ENTRIES       VARCHAR2(4000) := 'Multiple matches in A_SOURCE_FILE_CONFIG table where SOURCE_FILE_TYPE=''CONTAINER'' and specified SOURCE_FILE_ID';
PRAGMA EXCEPTION_INIT( ERR_MULTIPLE_CONTAINER_ENTRIES
                      ,CODE_MULTIPLE_CONTAINER_ENTRIES);

-- Raised when a destination parameter is not one of the accepted values.
ERR_WRONG_DESTINATION_PARAM          EXCEPTION;
CODE_WRONG_DESTINATION_PARAM         CONSTANT PLS_INTEGER := -20022;
MSG_WRONG_DESTINATION_PARAM          VARCHAR2(4000) := 'Wrong destination parameter provided.';
PRAGMA EXCEPTION_INIT( ERR_WRONG_DESTINATION_PARAM
                      ,CODE_WRONG_DESTINATION_PARAM);

-- Raised when an expected object is not present in cloud object storage.
-- BUGFIX: corrected grammar of the message text ('File not exists on cloud.').
ERR_FILE_NOT_EXISTS_ON_CLOUD         EXCEPTION;
CODE_FILE_NOT_EXISTS_ON_CLOUD        CONSTANT PLS_INTEGER := -20023;
MSG_FILE_NOT_EXISTS_ON_CLOUD         VARCHAR2(4000) := 'File does not exist on cloud.';
PRAGMA EXCEPTION_INIT( ERR_FILE_NOT_EXISTS_ON_CLOUD
                      ,CODE_FILE_NOT_EXISTS_ON_CLOUD);

-- Raised when a file has already been recorded in A_SOURCE_FILE_RECEIVED.
ERR_FILE_ALREADY_REGISTERED          EXCEPTION;
CODE_FILE_ALREADY_REGISTERED         CONSTANT PLS_INTEGER := -20024;
MSG_FILE_ALREADY_REGISTERED          VARCHAR2(4000) := 'File already registered in A_SOURCE_FILE_RECEIVED table.';
PRAGMA EXCEPTION_INIT( ERR_FILE_ALREADY_REGISTERED
                      ,CODE_FILE_ALREADY_REGISTERED);

-- Raised when a supplied DATE/TIMESTAMP format mask is malformed.
ERR_WRONG_DATE_TIMESTAMP_FORMAT      EXCEPTION;
CODE_WRONG_DATE_TIMESTAMP_FORMAT     CONSTANT PLS_INTEGER := -20025;
MSG_WRONG_DATE_TIMESTAMP_FORMAT      VARCHAR2(4000) := 'Provided DATE or TIMESTAMP format has errors (possible duplicated codes, ex: ''DD'').';
PRAGMA EXCEPTION_INIT( ERR_WRONG_DATE_TIMESTAMP_FORMAT
                      ,CODE_WRONG_DATE_TIMESTAMP_FORMAT);

-- Raised when the environment id can be resolved neither from the session
-- client identifier nor from A_FILE_MANAGER_CONFIG. Message documents both
-- setup paths for the operator (cgBL = package line-break constant).
ERR_ENVIRONMENT_NOT_SET              EXCEPTION;
CODE_ENVIRONMENT_NOT_SET             CONSTANT PLS_INTEGER := -20026;
MSG_ENVIRONMENT_NOT_SET              VARCHAR2(4000) := 'EnvironmentID not set'
    ||cgBL||' Information about environment is needed to get proper configuration values.'
    ||cgBL||' It can be set up in two different ways:'
    ||cgBL||' 1. Set it on session level: execute DBMS_SESSION.SET_IDENTIFIER (client_id => ''dev'')'
    ||cgBL||' 2. Set it on configuration level: Insert into CT_MRDS.A_FILE_MANAGER_CONFIG (ENVIRONMENT_ID,CONFIG_VARIABLE,CONFIG_VARIABLE_VALUE) values (''default'',''environment_id'',''dev'')'
    ||cgBL||' Session level setup (1.) takes precedence over configuration level one (2.)'
    ;
PRAGMA EXCEPTION_INIT( ERR_ENVIRONMENT_NOT_SET
                      ,CODE_ENVIRONMENT_NOT_SET);

-- Raised when a required configuration variable has no row/value.
ERR_CONFIG_VARIABLE_NOT_SET          EXCEPTION;
CODE_CONFIG_VARIABLE_NOT_SET         CONSTANT PLS_INTEGER := -20027;
MSG_CONFIG_VARIABLE_NOT_SET          VARCHAR2(4000) := 'Missing configuration value in A_FILE_MANAGER_CONFIG';
PRAGMA EXCEPTION_INIT( ERR_CONFIG_VARIABLE_NOT_SET
                      ,CODE_CONFIG_VARIABLE_NOT_SET);

-- Raised when archival is requested for a non-INPUT source file type.
ERR_NOT_INPUT_SOURCE_FILE_TYPE       EXCEPTION;
CODE_NOT_INPUT_SOURCE_FILE_TYPE      CONSTANT PLS_INTEGER := -20028;
MSG_NOT_INPUT_SOURCE_FILE_TYPE       VARCHAR2(4000) := 'Archival can be executed only for A_SOURCE_FILE_CONFIG_KEY where SOURCE_FILE_TYPE=''INPUT''';
PRAGMA EXCEPTION_INIT( ERR_NOT_INPUT_SOURCE_FILE_TYPE
                      ,CODE_NOT_INPUT_SOURCE_FILE_TYPE);

-- Raised when the data export step of the archival process fails.
ERR_EXP_DATA_FOR_ARCH_FAILED         EXCEPTION;
CODE_EXP_DATA_FOR_ARCH_FAILED        CONSTANT PLS_INTEGER := -20029;
MSG_EXP_DATA_FOR_ARCH_FAILED         VARCHAR2(4000) := 'Export data for archival failed.';
PRAGMA EXCEPTION_INIT( ERR_EXP_DATA_FOR_ARCH_FAILED
                      ,CODE_EXP_DATA_FOR_ARCH_FAILED);

-- Raised when rollback of an archival run (restoring files from TRASH) fails.
-- BUGFIX: corrected typo/grammar ('occured while archival process').
ERR_RESTORE_FILE_FROM_TRASH          EXCEPTION;
CODE_RESTORE_FILE_FROM_TRASH         CONSTANT PLS_INTEGER := -20030;
MSG_RESTORE_FILE_FROM_TRASH          VARCHAR2(4000) := 'Unexpected issues occurred during archival process. Restoration of exported files failed.';
PRAGMA EXCEPTION_INIT( ERR_RESTORE_FILE_FROM_TRASH
                      ,CODE_RESTORE_FILE_FROM_TRASH);

-- Raised when the status flip to ARCHIVED cannot be persisted.
ERR_CHANGE_STAT_TO_ARCHIVED_FAILED   EXCEPTION;
CODE_CHANGE_STAT_TO_ARCHIVED_FAILED  CONSTANT PLS_INTEGER := -20031;
MSG_CHANGE_STAT_TO_ARCHIVED_FAILED   VARCHAR2(4000) := 'Failed to change file status to: ARCHIVED in A_SOURCE_FILE_RECEIVED table.';
PRAGMA EXCEPTION_INIT( ERR_CHANGE_STAT_TO_ARCHIVED_FAILED
                      ,CODE_CHANGE_STAT_TO_ARCHIVED_FAILED);

-- Raised when the move-to-TRASH step (performed before a DROP) fails.
ERR_MOVE_FILE_TO_TRASH_FAILED        EXCEPTION;
CODE_MOVE_FILE_TO_TRASH_FAILED       CONSTANT PLS_INTEGER := -20032;
MSG_MOVE_FILE_TO_TRASH_FAILED        VARCHAR2(4000) := 'FAILED to move file to TRASH before DROPPING it.';
PRAGMA EXCEPTION_INIT( ERR_MOVE_FILE_TO_TRASH_FAILED
                      ,CODE_MOVE_FILE_TO_TRASH_FAILED);

-- Raised when dropping the exported files fails.
-- BUGFIX: the original message was a copy-paste duplicate of the -20032
-- "move file to TRASH" text and described the wrong failure.
ERR_DROP_EXPORTED_FILES_FAILED       EXCEPTION;
CODE_DROP_EXPORTED_FILES_FAILED      CONSTANT PLS_INTEGER := -20033;
MSG_DROP_EXPORTED_FILES_FAILED       VARCHAR2(4000) := 'FAILED to DROP exported files.';
PRAGMA EXCEPTION_INIT( ERR_DROP_EXPORTED_FILES_FAILED
                      ,CODE_DROP_EXPORTED_FILES_FAILED);

-- Raised when a bucket-area argument is outside the accepted set.
ERR_INVALID_BUCKET_AREA              EXCEPTION;
CODE_INVALID_BUCKET_AREA             CONSTANT PLS_INTEGER := -20034;
MSG_INVALID_BUCKET_AREA              VARCHAR2(4000) := 'Invalid bucket area specified. Valid values: INBOX, ODS, DATA, ARCHIVE';
PRAGMA EXCEPTION_INIT( ERR_INVALID_BUCKET_AREA
                      ,CODE_INVALID_BUCKET_AREA);

-- Catch-all for errors with no dedicated code.
-- BUGFIX: corrected typo 'Occured' -> 'Occurred'.
ERR_UNKNOWN                          EXCEPTION;
CODE_UNKNOWN                         CONSTANT PLS_INTEGER := -20999;
MSG_UNKNOWN                          VARCHAR2(4000) := 'Unknown Error Occurred';
PRAGMA EXCEPTION_INIT( ERR_UNKNOWN
                      ,CODE_UNKNOWN);

---------------------------------------------------------------------------------------------------------------------------
---------------------------------------------------------------------------------------------------------------------------
|
||||
|
||||
|
||||
|
||||
|
||||
/**
 * @name LOG_PROCESS_EVENT
 * @desc Insert a new log record into A_PROCESS_LOG table.
 *       Also outputs to console if gvConsoleLoggingEnabled = 'ON'.
 *       Respects logging level configuration (gvMinLogLevel).
 * @param pLogMessage  - Text of the message to log
 * @param pLogLevel    - Severity label for the record (defaults to 'ERROR')
 * @param pParameters  - Caller's parameter list, for context (defaults to NULL)
 * @param pProcessName - Name of the calling process/package (defaults to 'FILE_MANAGER')
 * @example ENV_MANAGER.LOG_PROCESS_EVENT('Process completed successfully', 'INFO', 'pParam1=value1');
 * @ex_rslt Record inserted into A_PROCESS_LOG table and optionally displayed in console output
 **/
PROCEDURE LOG_PROCESS_EVENT (
     pLogMessage       VARCHAR2
    ,pLogLevel         VARCHAR2 DEFAULT 'ERROR'
    ,pParameters       VARCHAR2 DEFAULT NULL
    ,pProcessName      VARCHAR2 DEFAULT 'FILE_MANAGER'
);

/**
 * @name LOG_PROCESS_ERROR
 * @desc Insert a detailed error record into A_PROCESS_LOG table with full stack trace, backtrace, and call stack.
 *       This procedure captures comprehensive error information for debugging purposes while
 *       allowing clean user-facing error messages to be raised separately.
 * @param pLogMessage  - Base error message description
 * @param pParameters  - Procedure parameters for context (defaults to NULL)
 * @param pProcessName - Name of the calling process/package (defaults to 'FILE_MANAGER')
 * @ex_rslt Record inserted into A_PROCESS_LOG table with complete error stack information
 **/
PROCEDURE LOG_PROCESS_ERROR (
     pLogMessage       VARCHAR2
    ,pParameters       VARCHAR2 DEFAULT NULL
    ,pProcessName      VARCHAR2 DEFAULT 'FILE_MANAGER'
);
|
||||
|
||||
/**
 * @name INIT_ERRORS
 * @desc Loads data into Errors array.
 *       Errors array is a list of Record(Error_Code, Error_Message) indexed by Error_Code.
 *       Called automatically during package initialization.
 * @example Called automatically when package is first referenced
 * @ex_rslt Errors array populated with all error codes and messages
 **/
PROCEDURE INIT_ERRORS;

/**
 * @name GET_DEFAULT_ENV
 * @desc Returns a string with the name of the default environment.
 *       The returned string is an A_FILE_MANAGER_CONFIG.ENVIRONMENT_ID value.
 * @example select ENV_MANAGER.GET_DEFAULT_ENV() from dual;
 * @ex_rslt dev
 **/
FUNCTION GET_DEFAULT_ENV
RETURN VARCHAR2;

/**
 * @name INIT_VARIABLES
 * @desc For the specified pEnv parameter (A_FILE_MANAGER_CONFIG.ENVIRONMENT_ID)
 *       assigns values to the following global package variables:
 *       - gvNameSpace
 *       - gvRegion
 *       - gvCredentialName
 *       - gvInboxBucketName
 *       - gvDataBucketName
 *       - gvArchiveBucketName
 *       - gvInboxBucketUri
 *       - gvDataBucketUri
 *       - gvArchiveBucketUri
 *       - gvLoggingEnabled
 *       - gvMinLogLevel
 *       - gvDefaultDateFormat
 *       - gvConsoleLoggingEnabled
 * @param pEnv - Environment identifier whose configuration rows are loaded
 **/
PROCEDURE INIT_VARIABLES(
    pEnv VARCHAR2
);
|
||||
|
||||
|
||||
|
||||
/**
 * @name GET_ERROR_MESSAGE
 * @desc Returns a string with the error message for the specified pCode (Error_Code).
 *       The error message is taken from the Errors array loaded by the INIT_ERRORS procedure.
 * @param pCode - Application error code (e.g. -20009)
 * @example select ENV_MANAGER.GET_ERROR_MESSAGE(pCode => -20009) from dual;
 * @ex_rslt File not found on the cloud
 **/
FUNCTION GET_ERROR_MESSAGE(
    pCode PLS_INTEGER
) RETURN VARCHAR2;

/**
 * @name GET_ERROR_STACK
 * @desc Returns a string with all available error stack info.
 *       The error message is taken from the Errors array loaded by the INIT_ERRORS procedure.
 * @param pFormat                - Output layout selector (e.g. 'OUTPUT', 'TABLE')
 * @param pCode                  - Application error code to report on
 * @param pSourceFileReceivedKey - Optional key of the related A_SOURCE_FILE_RECEIVED row
 * @example
 *  select ENV_MANAGER.GET_ERROR_STACK(
 *             pFormat => 'OUTPUT'
 *            ,pCode => -20009
 *            ,pSourceFileReceivedKey => NULL)
 *  from dual
 * @ex_rslt
 *  ------------------------------------------------------+
 *  Error Message:
 *  ORA-0000: normal, successful completion
 *  -------------------------------------------------------
 *  Error Stack:
 *  -------------------------------------------------------
 *  Error Backtrace:
 *  ------------------------------------------------------+
 **/
FUNCTION GET_ERROR_STACK(
     pFormat                VARCHAR2
    ,pCode                  PLS_INTEGER
    ,pSourceFileReceivedKey CT_MRDS.A_SOURCE_FILE_RECEIVED.A_SOURCE_FILE_RECEIVED_KEY%TYPE DEFAULT NULL
) RETURN VARCHAR2;

/**
 * @name FORMAT_PARAMETERS
 * @desc Formats parameter list for logging purposes.
 *       Converts SYS.ODCIVARCHAR2LIST to formatted string with proper NULL handling.
 * @param pParameterList - Collection of 'name => value' strings to render
 * @example select ENV_MANAGER.FORMAT_PARAMETERS(SYS.ODCIVARCHAR2LIST('param1=value1', 'param2=NULL')) from dual;
 * @ex_rslt param1=value1 ,
 *          param2=NULL
 **/
FUNCTION FORMAT_PARAMETERS(
    pParameterList SYS.ODCIVARCHAR2LIST
) RETURN VARCHAR2;

/**
 * @name ANALYZE_VALIDATION_ERRORS
 * @desc Analyzes CSV validation errors and generates detailed diagnostic report.
 *       Compares CSV structure with template table and provides specific error analysis.
 *       Includes suggested solutions for common validation issues.
 * @param pValidationLogTable - Name of validation log table (e.g., VALIDATE$242_LOG)
 * @param pTemplateSchema     - Schema of template table (e.g., CT_ET_TEMPLATES)
 * @param pTemplateTable      - Name of template table (e.g., MOCK_PROC_TABLE)
 * @param pCsvFileUri         - URI of CSV file being validated
 * @example SELECT ENV_MANAGER.ANALYZE_VALIDATION_ERRORS('VALIDATE$242_LOG', 'CT_ET_TEMPLATES', 'MOCK_PROC_TABLE', 'https://...') FROM DUAL;
 * @ex_rslt Detailed validation analysis report with column mismatches and solutions
 **/
FUNCTION ANALYZE_VALIDATION_ERRORS(
    pValidationLogTable VARCHAR2,
    pTemplateSchema     VARCHAR2,
    pTemplateTable      VARCHAR2,
    pCsvFileUri         VARCHAR2
) RETURN VARCHAR2;
|
||||
|
||||
---------------------------------------------------------------------------------------------------------------------------
-- PACKAGE VERSION MANAGEMENT FUNCTIONS
---------------------------------------------------------------------------------------------------------------------------

/**
 * @name GET_VERSION
 * @desc Returns the current version number of the ENV_MANAGER package.
 *       Uses semantic versioning format (MAJOR.MINOR.PATCH).
 * @example SELECT ENV_MANAGER.GET_VERSION() FROM DUAL;
 * @ex_rslt 3.0.0
 **/
FUNCTION GET_VERSION RETURN VARCHAR2;

/**
 * @name GET_BUILD_INFO
 * @desc Returns comprehensive build information including version, build date, and author.
 *       Formatted for display in logs or monitoring systems.
 * @example SELECT ENV_MANAGER.GET_BUILD_INFO() FROM DUAL;
 * @ex_rslt Package: ENV_MANAGER
 *          Version: 3.0.0
 *          Build Date: 2025-10-22 16:00:00
 *          Author: Grzegorz Michalski
 **/
FUNCTION GET_BUILD_INFO RETURN VARCHAR2;

/**
 * @name GET_VERSION_HISTORY
 * @desc Returns complete version history with all releases and changes.
 *       Shows evolution of package features over time.
 * @example SELECT ENV_MANAGER.GET_VERSION_HISTORY() FROM DUAL;
 * @ex_rslt ENV_MANAGER Version History:
 *          3.0.0 (2025-10-22): Added package versioning system...
 *          2.1.0 (2025-10-15): Added ANALYZE_VALIDATION_ERRORS function...
 **/
FUNCTION GET_VERSION_HISTORY RETURN VARCHAR2;

/**
 * @name GET_PACKAGE_VERSION_INFO
 * @desc Universal function to get formatted version information for any package.
 *       This centralized function is used by all packages in the system.
 * @param pPackageName - Name of the package
 * @param pVersion     - Version string (MAJOR.MINOR.PATCH format)
 * @param pBuildDate   - Build date timestamp
 * @param pAuthor      - Package author name
 * @example SELECT ENV_MANAGER.GET_PACKAGE_VERSION_INFO('FILE_MANAGER', '2.1.0', '2025-10-22 15:00:00', 'Grzegorz Michalski') FROM DUAL;
 * @ex_rslt Package: FILE_MANAGER
 *          Version: 2.1.0
 *          Build Date: 2025-10-22 15:00:00
 *          Author: Grzegorz Michalski
 **/
FUNCTION GET_PACKAGE_VERSION_INFO(
    pPackageName VARCHAR2,
    pVersion     VARCHAR2,
    pBuildDate   VARCHAR2,
    pAuthor      VARCHAR2
) RETURN VARCHAR2;

/**
 * @name FORMAT_VERSION_HISTORY
 * @desc Universal function to format version history for any package.
 *       Adds package name header and proper formatting.
 * @param pPackageName    - Name of the package
 * @param pVersionHistory - Complete version history text
 * @example SELECT ENV_MANAGER.FORMAT_VERSION_HISTORY('FILE_MANAGER', '2.1.0 (2025-10-22): Export procedures...') FROM DUAL;
 * @ex_rslt FILE_MANAGER Version History:
 *          2.1.0 (2025-10-22): Export procedures...
 **/
FUNCTION FORMAT_VERSION_HISTORY(
    pPackageName    VARCHAR2,
    pVersionHistory VARCHAR2
) RETURN VARCHAR2;
|
||||
|
||||
---------------------------------------------------------------------------------------------------------------------------
-- PACKAGE HASH + CHANGE DETECTION FUNCTIONS
---------------------------------------------------------------------------------------------------------------------------

/**
 * @name CALCULATE_PACKAGE_HASH
 * @desc Calculates SHA256 hash of package source code from ALL_SOURCE.
 *       Returns hash for both SPEC and BODY (if exists).
 *       Used for automatic change detection.
 * @param pPackageOwner - Schema owner of the package
 * @param pPackageName  - Name of the package
 * @param pPackageType  - Type of package code ('PACKAGE' for SPEC, 'PACKAGE BODY' for BODY)
 * @example SELECT ENV_MANAGER.CALCULATE_PACKAGE_HASH('CT_MRDS', 'FILE_MANAGER', 'PACKAGE') FROM DUAL;
 * @ex_rslt A7B3C5D9E8F1234567890ABCDEF... (64-character SHA256 hash)
 **/
FUNCTION CALCULATE_PACKAGE_HASH(
    pPackageOwner VARCHAR2,
    pPackageName  VARCHAR2,
    pPackageType  VARCHAR2 -- 'PACKAGE' or 'PACKAGE BODY'
) RETURN VARCHAR2;

/**
 * @name TRACK_PACKAGE_VERSION
 * @desc Records package version and source code hash in A_PACKAGE_VERSION_TRACKING table.
 *       Automatically detects if source code changed without version update.
 *       Should be called after every package deployment.
 * @param pPackageOwner     - Schema owner of the package
 * @param pPackageName      - Name of the package
 * @param pPackageVersion   - Current version from PACKAGE_VERSION constant
 * @param pPackageBuildDate - Build date from PACKAGE_BUILD_DATE constant
 * @param pPackageAuthor    - Author from PACKAGE_AUTHOR constant
 * @example EXEC ENV_MANAGER.TRACK_PACKAGE_VERSION('CT_MRDS', 'FILE_MANAGER', '3.2.0', '2025-10-22 16:30:00', 'Grzegorz Michalski');
 * @ex_rslt Record inserted into A_PACKAGE_VERSION_TRACKING with change detection status
 **/
PROCEDURE TRACK_PACKAGE_VERSION(
    pPackageOwner     VARCHAR2,
    pPackageName      VARCHAR2,
    pPackageVersion   VARCHAR2,
    pPackageBuildDate VARCHAR2,
    pPackageAuthor    VARCHAR2
);

/**
 * @name CHECK_PACKAGE_CHANGES
 * @desc Checks if package source code has changed since last tracking.
 *       Compares current hash with last recorded hash in A_PACKAGE_VERSION_TRACKING.
 *       Returns detailed change detection report.
 * @param pPackageOwner - Schema owner of the package
 * @param pPackageName  - Name of the package
 * @example SELECT ENV_MANAGER.CHECK_PACKAGE_CHANGES('CT_MRDS', 'FILE_MANAGER') FROM DUAL;
 * @ex_rslt WARNING: Package changed without version update!
 *          Last Version: 3.2.0
 *          Current Hash (SPEC): A7B3C5D9...
 *          Last Hash (SPEC): B8C4D6E0...
 *          RECOMMENDATION: Update PACKAGE_VERSION and PACKAGE_BUILD_DATE
 **/
FUNCTION CHECK_PACKAGE_CHANGES(
    pPackageOwner VARCHAR2,
    pPackageName  VARCHAR2
) RETURN VARCHAR2;

/**
 * @name GET_PACKAGE_HASH_INFO
 * @desc Returns formatted information about package hash and tracking history.
 *       Includes current hash, last tracked hash, and change detection status.
 * @param pPackageOwner - Schema owner of the package
 * @param pPackageName  - Name of the package
 * @example SELECT ENV_MANAGER.GET_PACKAGE_HASH_INFO('CT_MRDS', 'FILE_MANAGER') FROM DUAL;
 * @ex_rslt Package: CT_MRDS.FILE_MANAGER
 *          Current Version: 3.2.0
 *          Current Hash (SPEC): A7B3C5D9...
 *          Last Tracked: 2025-10-22 16:30:00
 *          Status: OK - No changes detected
 **/
FUNCTION GET_PACKAGE_HASH_INFO(
    pPackageOwner VARCHAR2,
    pPackageName  VARCHAR2
) RETURN VARCHAR2;

END ENV_MANAGER;

/
|
||||
@@ -0,0 +1,708 @@
|
||||
create or replace PACKAGE BODY CT_MRDS.DATA_EXPORTER
|
||||
AS
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
----------------------------------------------------------------------------------------------------
-- EXPORT_TABLE_DATA
-- Purpose: For every distinct value of pKeyColumnName in pSchemaName.pTableName, exports the
--          matching rows as one CSV file (named after the sanitized key value) to the bucket
--          area resolved via FILE_MANAGER.GET_BUCKET_URI.
-- In:  pSchemaName     - owner of the source table (validated against ALL_TABLES)
--      pTableName      - source table name (validated against ALL_TABLES)
--      pKeyColumnName  - column whose distinct values partition the export (one CSV per value)
--      pBucketArea     - logical bucket area name passed to FILE_MANAGER.GET_BUCKET_URI
--      pFolderName     - optional folder prefix inside the bucket (NULL = bucket root)
--      pCredentialName - DBMS_CLOUD credential (defaults to ENV_MANAGER.gvCredentialName)
-- Raises: ENV_MANAGER.CODE_TABLE_NOT_EXISTS, CODE_COLUMN_NOT_EXISTS,
--         CODE_UNSUPPORTED_DATA_TYPE, CODE_UNKNOWN (wrapped OTHERS).
----------------------------------------------------------------------------------------------------
PROCEDURE EXPORT_TABLE_DATA (
    pSchemaName      IN VARCHAR2,
    pTableName       IN VARCHAR2,
    pKeyColumnName   IN VARCHAR2,
    pBucketArea      IN VARCHAR2,
    pFolderName      IN VARCHAR2,
    pCredentialName  IN VARCHAR2 default ENV_MANAGER.gvCredentialName
)
IS
    -- Collection of distinct key values; each produces one CSV file.
    TYPE key_value_tab IS TABLE OF VARCHAR2(4000);
    vKeyValues       key_value_tab;
    vCount           INTEGER;
    vSql             VARCHAR2(4000);
    vKeyValue        VARCHAR2(4000);
    vQuery           VARCHAR2(32767);
    vUri             VARCHAR2(4000);
    vDataType        VARCHAR2(30);
    vTableName       VARCHAR2(128);
    vSchemaName      VARCHAR2(128);
    vKeyColumnName   VARCHAR2(128);
    vParameters      VARCHAR2(4000);
    vBucketUri       VARCHAR2(4000);

    -- Replace characters that are unsafe in object-storage file names with '_'.
    FUNCTION sanitizeFilename(pFilename IN VARCHAR2) RETURN VARCHAR2 IS
        vFilename VARCHAR2(1000);
    BEGIN
        vFilename := REGEXP_REPLACE(pFilename, '[^a-zA-Z0-9._-]', '_');
        RETURN vFilename;
    END sanitizeFilename;

BEGIN
    vParameters := ENV_MANAGER.FORMAT_PARAMETERS(SYS.ODCIVARCHAR2LIST( 'pSchemaName => '''||nvl(pSchemaName, 'NULL')||''''
                                                                      ,'pTableName => '''||nvl(pTableName, 'NULL')||''''
                                                                      ,'pKeyColumnName => '''||nvl(pKeyColumnName, 'NULL')||''''
                                                                      ,'pBucketArea => '''||nvl(pBucketArea, 'NULL')||''''
                                                                      ,'pFolderName => '''||nvl(pFolderName, 'NULL')||''''
                                                                      ,'pCredentialName => '''||nvl(pCredentialName, 'NULL')||''''
                                                                      ));
    ENV_MANAGER.LOG_PROCESS_EVENT('Start','INFO', vParameters);

    -- Resolve the target bucket URI from the logical bucket area name.
    vBucketUri := FILE_MANAGER.GET_BUCKET_URI(pBucketArea);

    -- Data-dictionary identifiers are stored uppercase; normalize before lookups.
    vTableName     := UPPER(pTableName);
    vSchemaName    := UPPER(pSchemaName);
    vKeyColumnName := UPPER(pKeyColumnName);

    -- Verify the source table exists.
    SELECT COUNT(*) INTO vCount
      FROM all_tables
     WHERE table_name = vTableName
       AND owner      = vSchemaName;

    IF vCount = 0 THEN
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_TABLE_NOT_EXISTS, ENV_MANAGER.MSG_TABLE_NOT_EXISTS);
    END IF;

    -- Verify the key column exists in that table.
    SELECT COUNT(*) INTO vCount
      FROM all_tab_columns
     WHERE table_name  = vTableName
       AND column_name = vKeyColumnName
       AND owner       = vSchemaName;

    IF vCount = 0 THEN
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, ENV_MANAGER.MSG_COLUMN_NOT_EXISTS);
    END IF;

    -- The key column's data type decides how its values are rendered in the WHERE clause.
    SELECT data_type INTO vDataType
      FROM all_tab_columns
     WHERE table_name  = vTableName
       AND column_name = vKeyColumnName
       AND owner       = vSchemaName;

    -- DBMS_ASSERT guards the identifiers against SQL injection before dynamic SQL.
    vTableName := DBMS_ASSERT.SCHEMA_NAME(vSchemaName) || '.' || DBMS_ASSERT.simple_sql_name(vTableName);

    -- Fetch the distinct key values that partition the export.
    vSql := 'SELECT DISTINCT ' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) ||
            ' FROM ' || vTableName;
    EXECUTE IMMEDIATE vSql BULK COLLECT INTO vKeyValues;

    -- One export per distinct key value.
    FOR i IN 1 .. vKeyValues.COUNT LOOP
        vKeyValue := vKeyValues(i);

        IF vDataType IN ('VARCHAR2', 'CHAR', 'NCHAR', 'NVARCHAR2') THEN
            -- BUGFIX: double embedded single quotes so a key value containing an
            -- apostrophe cannot break (or inject into) the generated query.
            vQuery := 'SELECT * FROM ' || vTableName ||
                      ' WHERE ' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = ' ||
                      CHR(39) || REPLACE(vKeyValue, CHR(39), CHR(39) || CHR(39)) || CHR(39);
        ELSIF vDataType IN ('NUMBER', 'FLOAT', 'BINARY_FLOAT', 'BINARY_DOUBLE') THEN
            vQuery := 'SELECT * FROM ' || vTableName ||
                      ' WHERE ' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = ' || vKeyValue;
        ELSIF vDataType LIKE 'TIMESTAMP%' OR vDataType = 'DATE' THEN
            vQuery := 'SELECT * FROM ' || vTableName ||
                      ' WHERE ' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) ||
                      ' = TO_TIMESTAMP(' || CHR(39) || vKeyValue || CHR(39) ||', ''YYYY-MM-DD HH24:MI:SS.FF'')';
        ELSE
            RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_UNSUPPORTED_DATA_TYPE, ENV_MANAGER.MSG_UNSUPPORTED_DATA_TYPE);
        END IF;

        -- Target object URI: <bucket>[/<folder>]/<sanitized key>.csv
        vUri := vBucketUri ||
                CASE WHEN pFolderName IS NOT NULL THEN pFolderName || '/' ELSE '' END ||
                sanitizeFilename(vKeyValue) || '.csv';

        DBMS_CLOUD.EXPORT_DATA(
            credential_name => pCredentialName,
            file_uri_list   => vUri,
            query           => vQuery,
            format          => json_object('type' VALUE 'CSV', 'header' VALUE true)
        );
    END LOOP;
    ENV_MANAGER.LOG_PROCESS_EVENT('End','INFO',vParameters);
EXCEPTION
    WHEN ENV_MANAGER.ERR_TABLE_NOT_EXISTS THEN
        vgMsgTmp := ENV_MANAGER.MSG_TABLE_NOT_EXISTS ||': '||vTableName;
        ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_TABLE_NOT_EXISTS, vgMsgTmp);
    WHEN ENV_MANAGER.ERR_COLUMN_NOT_EXISTS THEN
        vgMsgTmp := ENV_MANAGER.MSG_COLUMN_NOT_EXISTS || ' (TableName.ColumnName): ' || vTableName||'.'||vKeyColumnName;
        ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, vgMsgTmp);
    WHEN ENV_MANAGER.ERR_UNSUPPORTED_DATA_TYPE THEN
        vgMsgTmp := ENV_MANAGER.MSG_UNSUPPORTED_DATA_TYPE || ' vDataType: '||vDataType;
        ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_UNSUPPORTED_DATA_TYPE, vgMsgTmp);
    WHEN OTHERS THEN
        -- Log complete error details including full stack trace and backtrace,
        -- then re-raise with the generic code so callers see a clean message.
        ENV_MANAGER.LOG_PROCESS_ERROR('Export failed: ' || SQLERRM, vParameters, 'DATA_EXPORTER');
        ENV_MANAGER.LOG_PROCESS_EVENT(ENV_MANAGER.GET_ERROR_STACK(pFormat => 'TABLE', pCode=> SQLCODE), 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_UNKNOWN, ENV_MANAGER.GET_ERROR_STACK(pFormat => 'OUTPUT', pCode=> SQLCODE));

END EXPORT_TABLE_DATA;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
PROCEDURE EXPORT_TABLE_DATA_BY_DATE (
|
||||
pSchemaName IN VARCHAR2,
|
||||
pTableName IN VARCHAR2,
|
||||
pKeyColumnName IN VARCHAR2,
|
||||
pBucketArea IN VARCHAR2,
|
||||
pFolderName IN VARCHAR2,
|
||||
pColumnList IN VARCHAR2 default NULL,
|
||||
pMinDate IN DATE default DATE '1900-01-01',
|
||||
pMaxDate IN DATE default SYSDATE,
|
||||
pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
|
||||
)
|
||||
IS
|
||||
-- Type definition for key values
|
||||
TYPE key_value_tab IS TABLE OF VARCHAR2(4000);
|
||||
|
||||
vKeyValuesYear key_value_tab;
|
||||
vKeyValuesMonth key_value_tab;
|
||||
|
||||
vCount INTEGER;
|
||||
vSql VARCHAR2(32000);
|
||||
vKeyValueYear VARCHAR2(4000);
|
||||
vKeyValueMonth VARCHAR2(4000);
|
||||
vQuery VARCHAR2(32767);
|
||||
vUri VARCHAR2(4000);
|
||||
vDataType VARCHAR2(30);
|
||||
vTableName VARCHAR2(128);
|
||||
vSchemaName VARCHAR2(128);
|
||||
vKeyColumnName VARCHAR2(128);
|
||||
vParameters CT_MRDS.A_PROCESS_LOG.PROCEDURE_PARAMETERS%TYPE;
|
||||
vProcessedColumnList VARCHAR2(32767);
|
||||
vBucketUri VARCHAR2(4000);
|
||||
vCurrentCol VARCHAR2(128);
|
||||
|
||||
-- Function to sanitize file names
|
||||
FUNCTION sanitizeFilename(pFilename IN VARCHAR2) RETURN VARCHAR2 IS
|
||||
vFilename VARCHAR2(1000);
|
||||
BEGIN
|
||||
-- Replace any disallowed characters with underscores
|
||||
vFilename := REGEXP_REPLACE(pFilename, '[^a-zA-Z0-9._-]', '_');
|
||||
RETURN vFilename;
|
||||
END sanitizeFilename;
|
||||
|
||||
-- Function to add T. prefix to column names
|
||||
FUNCTION addTablePrefix(pColumnList IN VARCHAR2) RETURN VARCHAR2 IS
|
||||
vResult VARCHAR2(32767);
|
||||
vColumns VARCHAR2(32767);
|
||||
vPos PLS_INTEGER;
|
||||
vNextPos PLS_INTEGER;
|
||||
vCurrentCol VARCHAR2(128);
|
||||
BEGIN
|
||||
IF pColumnList IS NULL THEN
|
||||
RETURN 'T.*';
|
||||
END IF;
|
||||
|
||||
-- Remove extra spaces and convert to uppercase
|
||||
vColumns := UPPER(REPLACE(pColumnList, ' ', ''));
|
||||
vPos := 1;
|
||||
vResult := '';
|
||||
|
||||
-- Parse comma-separated column list and add T. prefix
|
||||
WHILE vPos <= LENGTH(vColumns) LOOP
|
||||
vNextPos := INSTR(vColumns, ',', vPos);
|
||||
IF vNextPos = 0 THEN
|
||||
vNextPos := LENGTH(vColumns) + 1;
|
||||
END IF;
|
||||
|
||||
vCurrentCol := SUBSTR(vColumns, vPos, vNextPos - vPos);
|
||||
|
||||
-- Add T. prefix if not already present
|
||||
IF INSTR(vCurrentCol, '.') = 0 THEN
|
||||
vCurrentCol := 'T.' || vCurrentCol;
|
||||
END IF;
|
||||
|
||||
-- Add to result with comma separator
|
||||
IF vResult IS NOT NULL THEN
|
||||
vResult := vResult || ', ';
|
||||
END IF;
|
||||
vResult := vResult || vCurrentCol;
|
||||
|
||||
vPos := vNextPos + 1;
|
||||
END LOOP;
|
||||
|
||||
RETURN vResult;
|
||||
END addTablePrefix;
|
||||
|
||||
BEGIN
|
||||
vParameters := ENV_MANAGER.FORMAT_PARAMETERS(SYS.ODCIVARCHAR2LIST( 'pSchemaName => '''||nvl(pSchemaName, 'NULL')||''''
|
||||
,'pTableName => '''||nvl(pTableName, 'NULL')||''''
|
||||
,'pKeyColumnName => '''||nvl(pKeyColumnName, 'NULL')||''''
|
||||
,'pBucketArea => '''||nvl(pBucketArea, 'NULL')||''''
|
||||
,'pFolderName => '''||nvl(pFolderName, 'NULL')||''''
|
||||
,'pColumnList => '''||nvl(pColumnList, 'NULL')||''''
|
||||
,'pMinDate => '''||nvl(TO_CHAR(pMinDate, 'YYYY-MM-DD HH24:MI:SS'), 'NULL')||''''
|
||||
,'pMaxDate => '''||nvl(TO_CHAR(pMaxDate, 'YYYY-MM-DD HH24:MI:SS'), 'NULL')||''''
|
||||
,'pCredentialName => '''||nvl(pCredentialName, 'NULL')||''''
|
||||
));
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT('Start','INFO', vParameters);
|
||||
|
||||
-- Get bucket URI based on bucket area using FILE_MANAGER function
|
||||
vBucketUri := FILE_MANAGER.GET_BUCKET_URI(pBucketArea);
|
||||
|
||||
-- Convert table and column names to uppercase to match data dictionary
|
||||
vTableName := UPPER(pTableName);
|
||||
vSchemaName := UPPER(pSchemaName);
|
||||
vKeyColumnName := UPPER(pKeyColumnName);
|
||||
|
||||
-- Check if table exists
|
||||
SELECT COUNT(*) INTO vCount
|
||||
FROM all_tables
|
||||
WHERE table_name = vTableName
|
||||
AND owner = vSchemaName;
|
||||
|
||||
IF vCount = 0 THEN
|
||||
RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_TABLE_NOT_EXISTS, ENV_MANAGER.MSG_TABLE_NOT_EXISTS);
|
||||
END IF;
|
||||
|
||||
-- Check if key column exists
|
||||
SELECT COUNT(*) INTO vCount
|
||||
FROM all_tab_columns
|
||||
WHERE table_name = vTableName
|
||||
AND column_name = vKeyColumnName
|
||||
AND owner = vSchemaName;
|
||||
|
||||
IF vCount = 0 THEN
|
||||
RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, ENV_MANAGER.MSG_COLUMN_NOT_EXISTS);
|
||||
END IF;
|
||||
|
||||
-- Validate pColumnList - check if all column names exist in the table
|
||||
IF pColumnList IS NOT NULL THEN
|
||||
DECLARE
|
||||
vColumnName VARCHAR2(128);
|
||||
vColumns VARCHAR2(32767);
|
||||
vPos PLS_INTEGER;
|
||||
vNextPos PLS_INTEGER;
|
||||
vCurrentCol VARCHAR2(128);
|
||||
BEGIN
|
||||
-- Remove spaces and convert to uppercase for processing
|
||||
vColumns := UPPER(REPLACE(pColumnList, ' ', ''));
|
||||
vPos := 1;
|
||||
|
||||
-- Parse comma-separated column list
|
||||
WHILE vPos <= LENGTH(vColumns) LOOP
|
||||
vNextPos := INSTR(vColumns, ',', vPos);
|
||||
IF vNextPos = 0 THEN
|
||||
vNextPos := LENGTH(vColumns) + 1;
|
||||
END IF;
|
||||
|
||||
vCurrentCol := SUBSTR(vColumns, vPos, vNextPos - vPos);
|
||||
|
||||
-- Remove table alias prefix if present (e.g., 'T.COLUMN_NAME' -> 'COLUMN_NAME')
|
||||
IF INSTR(vCurrentCol, '.') > 0 THEN
|
||||
vCurrentCol := SUBSTR(vCurrentCol, INSTR(vCurrentCol, '.') + 1);
|
||||
END IF;
|
||||
|
||||
-- Check if column exists in the table
|
||||
SELECT COUNT(*) INTO vCount
|
||||
FROM all_tab_columns
|
||||
WHERE table_name = vTableName
|
||||
AND column_name = vCurrentCol
|
||||
AND owner = vSchemaName;
|
||||
|
||||
IF vCount = 0 THEN
|
||||
RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, ENV_MANAGER.MSG_COLUMN_NOT_EXISTS);
|
||||
END IF;
|
||||
|
||||
vPos := vNextPos + 1;
|
||||
END LOOP;
|
||||
END;
|
||||
END IF;
|
||||
|
||||
-- Process column list to add T. prefix to each column
|
||||
vProcessedColumnList := addTablePrefix(pColumnList);
|
||||
|
||||
vTableName := DBMS_ASSERT.SCHEMA_NAME(vSchemaName) || '.' || DBMS_ASSERT.simple_sql_name(vTableName);
|
||||
-- Fetch unique key values
|
||||
vSql := 'SELECT DISTINCT TO_CHAR(L.LOAD_START,''YYYY'') AS YR, TO_CHAR(L.LOAD_START,''MM'') AS MN
|
||||
FROM ' || vTableName || ' T, CT_ODS.A_LOAD_HISTORY L
|
||||
WHERE T.' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = L.A_WORKFLOW_HISTORY_KEY
|
||||
AND L.LOAD_START >= :pMinDate
|
||||
AND L.LOAD_START < :pMaxDate
|
||||
' ;
|
||||
EXECUTE IMMEDIATE vSql BULK COLLECT INTO vKeyValuesYear, vKeyValuesMonth USING pMinDate, pMaxDate;
|
||||
|
||||
-- Loop over each unique key value
|
||||
FOR i IN 1 .. vKeyValuesYear.COUNT LOOP
|
||||
vKeyValueYear := vKeyValuesYear(i);
|
||||
vKeyValueMonth := vKeyValuesMonth(i);
|
||||
-- Construct the query to extract data for the current key value
|
||||
|
||||
vQuery := 'SELECT ' || vProcessedColumnList || '
|
||||
FROM ' || vTableName || ' T, CT_ODS.A_LOAD_HISTORY L
|
||||
WHERE T.' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = L.A_WORKFLOW_HISTORY_KEY
|
||||
AND TO_CHAR(L.LOAD_START,''YYYY'') = ' || CHR(39) || vKeyValueYear || CHR(39) || '
|
||||
AND TO_CHAR(L.LOAD_START,''MM'') = ' || CHR(39) || vKeyValueMonth || CHR(39) || '
|
||||
AND L.LOAD_START >= TO_DATE(' || CHR(39) || TO_CHAR(pMinDate, 'YYYY-MM-DD HH24:MI:SS') || CHR(39) || ', ''YYYY-MM-DD HH24:MI:SS'')
|
||||
AND L.LOAD_START < TO_DATE(' || CHR(39) || TO_CHAR(pMaxDate, 'YYYY-MM-DD HH24:MI:SS') || CHR(39) || ', ''YYYY-MM-DD HH24:MI:SS'')';
|
||||
|
||||
-- Construct the URI for the file in OCI Object Storage
|
||||
vUri := vBucketUri ||
|
||||
CASE WHEN pFolderName IS NOT NULL THEN pFolderName || '/' ELSE '' END ||
|
||||
'PARTITION_YEAR=' || sanitizeFilename(vKeyValueYear) || '/' ||
|
||||
'PARTITION_MONTH=' || sanitizeFilename(vKeyValueMonth) || '/' ||
|
||||
sanitizeFilename(vKeyValueYear) || sanitizeFilename(vKeyValueMonth) || '.parquet';
|
||||
|
||||
--DBMS_OUTPUT.PUT_LINE(vQuery);
|
||||
|
||||
-- Use DBMS_CLOUD package to export data to the URI
|
||||
DBMS_CLOUD.EXPORT_DATA(
|
||||
credential_name => pCredentialName,
|
||||
file_uri_list => vUri,
|
||||
query => vQuery,
|
||||
format => json_object('type' VALUE 'parquet')
|
||||
);
|
||||
END LOOP;
|
||||
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT('End','INFO',vParameters);
|
||||
EXCEPTION
|
||||
WHEN ENV_MANAGER.ERR_TABLE_NOT_EXISTS THEN
|
||||
vgMsgTmp := ENV_MANAGER.MSG_TABLE_NOT_EXISTS ||': '||vTableName;
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
|
||||
RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_TABLE_NOT_EXISTS, vgMsgTmp);
|
||||
WHEN ENV_MANAGER.ERR_COLUMN_NOT_EXISTS THEN
|
||||
vgMsgTmp := ENV_MANAGER.MSG_COLUMN_NOT_EXISTS || ' (TableName.ColumnName): ' || vTableName||'.'||vKeyColumnName||CASE WHEN vCurrentCol IS NOT NULL THEN '.'||vCurrentCol||' in pColumnList' ELSE '' END;
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
|
||||
RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, vgMsgTmp);
|
||||
WHEN OTHERS THEN
|
||||
-- Log complete error details including full stack trace and backtrace
|
||||
ENV_MANAGER.LOG_PROCESS_ERROR('Export failed: ' || SQLERRM, vParameters, 'DATA_EXPORTER');
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT(ENV_MANAGER.GET_ERROR_STACK(pFormat => 'TABLE', pCode=> SQLCODE), 'ERROR', vParameters);
|
||||
RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_UNKNOWN, ENV_MANAGER.GET_ERROR_STACK(pFormat => 'OUTPUT', pCode=> SQLCODE));
|
||||
|
||||
END EXPORT_TABLE_DATA_BY_DATE;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
/**
 * @name EXPORT_TABLE_DATA_TO_CSV_BY_DATE
 * @desc Exports data to separate CSV files partitioned by year and month.
 *       One CSV file is produced per distinct LOAD_START year/month found in
 *       CT_ODS.A_LOAD_HISTORY for the exported rows — the same date filtering
 *       mechanism as EXPORT_TABLE_DATA_BY_DATE, but writing CSV instead of Parquet.
 *       File naming pattern: {base}_{YYYY}{MM}{ext}, where {base}/{ext} come from
 *       pFileName (split on the last '.') or default to {TABLENAME} and '.csv'.
 *       Allows specifying a custom column list or uses T.* if pColumnList is NULL.
 *       Validates that all columns in pColumnList exist in the target table.
 *       Automatically adds the 'T.' alias prefix to column names in pColumnList.
 * @example
 * begin
 *   DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
 *     pSchemaName    => 'CT_MRDS',
 *     pTableName     => 'MY_TABLE',
 *     pKeyColumnName => 'A_WORKFLOW_HISTORY_KEY',
 *     pBucketArea    => 'DATA',
 *     pFolderName    => 'exports',
 *     pFileName      => 'my_export.csv',
 *     pColumnList    => 'COLUMN1, COLUMN2, COLUMN3', -- Optional
 *     pMinDate       => DATE '2024-01-01',
 *     pMaxDate       => SYSDATE
 *   );
 * end;
 **/
PROCEDURE EXPORT_TABLE_DATA_TO_CSV_BY_DATE (
    pSchemaName       IN VARCHAR2,
    pTableName        IN VARCHAR2,
    pKeyColumnName    IN VARCHAR2,
    pBucketArea       IN VARCHAR2,
    pFolderName       IN VARCHAR2,
    pFileName         IN VARCHAR2 DEFAULT NULL,
    pColumnList       IN VARCHAR2 default NULL,
    pMinDate          IN DATE default DATE '1900-01-01',
    pMaxDate          IN DATE default SYSDATE,
    pCredentialName   IN VARCHAR2 default ENV_MANAGER.gvCredentialName
)
IS
    -- Collection type for the dynamically fetched year/month values
    TYPE key_value_tab IS TABLE OF VARCHAR2(4000);

    vKeyValuesYear        key_value_tab;
    vKeyValuesMonth       key_value_tab;

    vCount                INTEGER;
    vSql                  VARCHAR2(4000);
    vKeyValueYear         VARCHAR2(4000);
    vKeyValueMonth        VARCHAR2(4000);
    vQuery                VARCHAR2(32767);
    vUri                  VARCHAR2(4000);
    vTableName            VARCHAR2(128);
    vSchemaName           VARCHAR2(128);
    vKeyColumnName        VARCHAR2(128);
    vParameters           CT_MRDS.A_PROCESS_LOG.PROCEDURE_PARAMETERS%TYPE;
    vFileBaseName         VARCHAR2(4000);
    vFileExtension        VARCHAR2(10);
    vProcessedColumnList  VARCHAR2(32767);
    vBucketUri            VARCHAR2(4000);
    -- Last column checked while validating pColumnList; reported by the
    -- ERR_COLUMN_NOT_EXISTS handler (must NOT be shadowed by nested blocks).
    vCurrentCol           VARCHAR2(128);

    ----------------------------------------------------------------------------
    -- Replace any character not safe for an object-storage file name with '_'
    ----------------------------------------------------------------------------
    FUNCTION sanitizeFilename(pFilename IN VARCHAR2) RETURN VARCHAR2 IS
    BEGIN
        RETURN REGEXP_REPLACE(pFilename, '[^a-zA-Z0-9._-]', '_');
    END sanitizeFilename;

    ----------------------------------------------------------------------------
    -- Prefix every unqualified column in a comma-separated list with 'T.'.
    -- Returns 'T.*' when pColumnList is NULL.
    ----------------------------------------------------------------------------
    FUNCTION addTablePrefix(pColumnList IN VARCHAR2) RETURN VARCHAR2 IS
        vResult  VARCHAR2(32767);
        vColumns VARCHAR2(32767);
        vPos     PLS_INTEGER;
        vNextPos PLS_INTEGER;
        vCol     VARCHAR2(128);
    BEGIN
        IF pColumnList IS NULL THEN
            RETURN 'T.*';
        END IF;

        -- Remove spaces and uppercase for uniform parsing
        vColumns := UPPER(REPLACE(pColumnList, ' ', ''));
        vPos     := 1;
        vResult  := '';

        -- Parse the comma-separated list and prefix each entry
        WHILE vPos <= LENGTH(vColumns) LOOP
            vNextPos := INSTR(vColumns, ',', vPos);
            IF vNextPos = 0 THEN
                vNextPos := LENGTH(vColumns) + 1;
            END IF;

            vCol := SUBSTR(vColumns, vPos, vNextPos - vPos);

            -- Only prefix unqualified names; 'X.COL' is kept as-is
            IF INSTR(vCol, '.') = 0 THEN
                vCol := 'T.' || vCol;
            END IF;

            IF vResult IS NOT NULL THEN
                vResult := vResult || ', ';
            END IF;
            vResult := vResult || vCol;

            vPos := vNextPos + 1;
        END LOOP;

        RETURN vResult;
    END addTablePrefix;

BEGIN
    vParameters := ENV_MANAGER.FORMAT_PARAMETERS(SYS.ODCIVARCHAR2LIST( 'pSchemaName => '''||nvl(pSchemaName, 'NULL')||''''
                                                                      ,'pTableName => '''||nvl(pTableName, 'NULL')||''''
                                                                      ,'pKeyColumnName => '''||nvl(pKeyColumnName, 'NULL')||''''
                                                                      ,'pBucketArea => '''||nvl(pBucketArea, 'NULL')||''''
                                                                      ,'pFolderName => '''||nvl(pFolderName, 'NULL')||''''
                                                                      ,'pFileName => '''||nvl(pFileName, 'NULL')||''''
                                                                      ,'pColumnList => '''||nvl(pColumnList, 'NULL')||''''
                                                                      ,'pMinDate => '''||nvl(TO_CHAR(pMinDate, 'YYYY-MM-DD HH24:MI:SS'), 'NULL')||''''
                                                                      ,'pMaxDate => '''||nvl(TO_CHAR(pMaxDate, 'YYYY-MM-DD HH24:MI:SS'), 'NULL')||''''
                                                                      ,'pCredentialName => '''||nvl(pCredentialName, 'NULL')||''''
                                                 ));
    ENV_MANAGER.LOG_PROCESS_EVENT('Start','INFO', vParameters);

    -- Resolve the bucket base URI for the requested area
    vBucketUri := FILE_MANAGER.GET_BUCKET_URI(pBucketArea);

    -- Uppercase identifiers to match the data dictionary
    vTableName     := UPPER(pTableName);
    vSchemaName    := UPPER(pSchemaName);
    vKeyColumnName := UPPER(pKeyColumnName);

    -- Derive the output base name and extension
    IF pFileName IS NOT NULL THEN
        -- Use the provided filename, split at the last '.'
        IF INSTR(pFileName, '.') > 0 THEN
            vFileBaseName  := SUBSTR(pFileName, 1, INSTR(pFileName, '.', -1) - 1);
            vFileExtension := SUBSTR(pFileName, INSTR(pFileName, '.', -1));
        ELSE
            vFileBaseName  := pFileName;
            vFileExtension := '.csv';
        END IF;
    ELSE
        -- Default filename: TABLENAME.csv (year/month is appended per file)
        vFileBaseName  := UPPER(pTableName);
        vFileExtension := '.csv';
    END IF;

    -- The target table must exist
    SELECT COUNT(*) INTO vCount
    FROM all_tables
    WHERE table_name = vTableName
      AND owner      = vSchemaName;

    IF vCount = 0 THEN
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_TABLE_NOT_EXISTS, ENV_MANAGER.MSG_TABLE_NOT_EXISTS);
    END IF;

    -- The key column must exist
    SELECT COUNT(*) INTO vCount
    FROM all_tab_columns
    WHERE table_name  = vTableName
      AND column_name = vKeyColumnName
      AND owner       = vSchemaName;

    IF vCount = 0 THEN
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, ENV_MANAGER.MSG_COLUMN_NOT_EXISTS);
    END IF;

    -- Every column in pColumnList must exist in the target table.
    -- The procedure-level vCurrentCol is assigned here (not a shadowed local)
    -- so that the exception handler can report which column failed validation.
    IF pColumnList IS NOT NULL THEN
        DECLARE
            vColumns VARCHAR2(32767);
            vPos     PLS_INTEGER;
            vNextPos PLS_INTEGER;
        BEGIN
            -- Remove spaces and uppercase for uniform parsing
            vColumns := UPPER(REPLACE(pColumnList, ' ', ''));
            vPos     := 1;

            -- Parse the comma-separated column list
            WHILE vPos <= LENGTH(vColumns) LOOP
                vNextPos := INSTR(vColumns, ',', vPos);
                IF vNextPos = 0 THEN
                    vNextPos := LENGTH(vColumns) + 1;
                END IF;

                vCurrentCol := SUBSTR(vColumns, vPos, vNextPos - vPos);

                -- Strip a table alias prefix if present ('T.COL' -> 'COL')
                IF INSTR(vCurrentCol, '.') > 0 THEN
                    vCurrentCol := SUBSTR(vCurrentCol, INSTR(vCurrentCol, '.') + 1);
                END IF;

                -- The column must exist in the table
                SELECT COUNT(*) INTO vCount
                FROM all_tab_columns
                WHERE table_name  = vTableName
                  AND column_name = vCurrentCol
                  AND owner       = vSchemaName;

                IF vCount = 0 THEN
                    RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, ENV_MANAGER.MSG_COLUMN_NOT_EXISTS);
                END IF;

                vPos := vNextPos + 1;
            END LOOP;
        END;
    END IF;

    -- Add the T. alias prefix to each column (or expand to T.*)
    vProcessedColumnList := addTablePrefix(pColumnList);

    -- Validate and fully qualify the table name for use in dynamic SQL
    vTableName := DBMS_ASSERT.SCHEMA_NAME(vSchemaName) || '.' || DBMS_ASSERT.simple_sql_name(vTableName);

    -- Fetch the distinct year/month combinations within the date window
    vSql := 'SELECT DISTINCT TO_CHAR(L.LOAD_START,''YYYY'') AS YR, TO_CHAR(L.LOAD_START,''MM'') AS MN
               FROM ' || vTableName || ' T, CT_ODS.A_LOAD_HISTORY L
              WHERE T.' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = L.A_WORKFLOW_HISTORY_KEY
                AND L.LOAD_START >= :pMinDate
                AND L.LOAD_START < :pMaxDate
            ' ;
    EXECUTE IMMEDIATE vSql BULK COLLECT INTO vKeyValuesYear, vKeyValuesMonth USING pMinDate, pMaxDate;

    ENV_MANAGER.LOG_PROCESS_EVENT('Found ' || vKeyValuesYear.COUNT || ' year/month combinations to export', 'INFO', vParameters);

    -- One CSV file per year/month combination
    FOR i IN 1 .. vKeyValuesYear.COUNT LOOP
        vKeyValueYear  := vKeyValuesYear(i);
        vKeyValueMonth := vKeyValuesMonth(i);

        -- Extraction query for the current year/month slice
        vQuery := 'SELECT ' || vProcessedColumnList || '
                     FROM ' || vTableName || ' T, CT_ODS.A_LOAD_HISTORY L
                    WHERE T.' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = L.A_WORKFLOW_HISTORY_KEY
                      AND TO_CHAR(L.LOAD_START,''YYYY'') = ' || CHR(39) || vKeyValueYear || CHR(39) || '
                      AND TO_CHAR(L.LOAD_START,''MM'') = ' || CHR(39) || vKeyValueMonth || CHR(39) || '
                      AND L.LOAD_START >= TO_DATE(' || CHR(39) || TO_CHAR(pMinDate, 'YYYY-MM-DD HH24:MI:SS') || CHR(39) || ', ''YYYY-MM-DD HH24:MI:SS'')
                      AND L.LOAD_START < TO_DATE(' || CHR(39) || TO_CHAR(pMaxDate, 'YYYY-MM-DD HH24:MI:SS') || CHR(39) || ', ''YYYY-MM-DD HH24:MI:SS'')';

        -- Target object URI: {bucket}/{folder}/{base}_{YYYY}{MM}{ext}
        vUri := vBucketUri ||
                CASE WHEN pFolderName IS NOT NULL THEN pFolderName || '/' ELSE '' END ||
                sanitizeFilename(vFileBaseName) || '_' ||
                sanitizeFilename(vKeyValueYear) || sanitizeFilename(vKeyValueMonth) ||
                vFileExtension;

        ENV_MANAGER.LOG_PROCESS_EVENT('Exporting to CSV file: ' || vUri, 'INFO', vParameters);
        ENV_MANAGER.LOG_PROCESS_EVENT('Year/Month: ' || vKeyValueYear || '/' || vKeyValueMonth, 'DEBUG', vParameters);

        -- Export the slice to CSV via DBMS_CLOUD
        DBMS_CLOUD.EXPORT_DATA(
            credential_name => pCredentialName,
            file_uri_list   => vUri,
            query           => vQuery,
            format          => json_object('type' VALUE 'CSV', 'header' VALUE true)
        );
    END LOOP;

    ENV_MANAGER.LOG_PROCESS_EVENT('Export completed successfully for ' || vKeyValuesYear.COUNT || ' files', 'INFO', vParameters);
    ENV_MANAGER.LOG_PROCESS_EVENT('End','INFO',vParameters);

EXCEPTION
    WHEN ENV_MANAGER.ERR_TABLE_NOT_EXISTS THEN
        vgMsgTmp := ENV_MANAGER.MSG_TABLE_NOT_EXISTS ||': '||vTableName;
        ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_TABLE_NOT_EXISTS, vgMsgTmp);
    WHEN ENV_MANAGER.ERR_COLUMN_NOT_EXISTS THEN
        -- vCurrentCol is only set while validating pColumnList; otherwise the
        -- failing column is the key column itself.
        vgMsgTmp := ENV_MANAGER.MSG_COLUMN_NOT_EXISTS || ' (TableName.ColumnName): ' || vTableName||'.'||vKeyColumnName||CASE WHEN vCurrentCol IS NOT NULL THEN '.'||vCurrentCol||' in pColumnList' ELSE '' END;
        ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, vgMsgTmp);
    WHEN OTHERS THEN
        -- Log complete error details including full stack trace and backtrace
        ENV_MANAGER.LOG_PROCESS_ERROR('Export failed: ' || SQLERRM, vParameters, 'DATA_EXPORTER');
        ENV_MANAGER.LOG_PROCESS_EVENT(ENV_MANAGER.GET_ERROR_STACK(pFormat => 'TABLE', pCode=> SQLCODE), 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_UNKNOWN, ENV_MANAGER.GET_ERROR_STACK(pFormat => 'OUTPUT', pCode=> SQLCODE));

END EXPORT_TABLE_DATA_TO_CSV_BY_DATE;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
-- VERSION MANAGEMENT FUNCTIONS
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
-- Returns the current package version string (format X.Y.Z).
FUNCTION GET_VERSION RETURN VARCHAR2
IS
    vVersion CONSTANT VARCHAR2(10) := PACKAGE_VERSION;
BEGIN
    RETURN vVersion;
END GET_VERSION;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
-- Returns formatted build information (version, build date, author) for this
-- package, delegating the formatting to ENV_MANAGER.
FUNCTION GET_BUILD_INFO RETURN VARCHAR2
IS
    vInfo VARCHAR2(32767);
BEGIN
    vInfo := ENV_MANAGER.GET_PACKAGE_VERSION_INFO(
                 pPackageName => 'DATA_EXPORTER',
                 pVersion     => PACKAGE_VERSION,
                 pBuildDate   => PACKAGE_BUILD_DATE,
                 pAuthor      => PACKAGE_AUTHOR);
    RETURN vInfo;
END GET_BUILD_INFO;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
-- Returns the package's recent version history, formatted by ENV_MANAGER.
FUNCTION GET_VERSION_HISTORY RETURN VARCHAR2
IS
    vHistory VARCHAR2(32767);
BEGIN
    vHistory := ENV_MANAGER.FORMAT_VERSION_HISTORY(
                    pPackageName    => 'DATA_EXPORTER',
                    pVersionHistory => VERSION_HISTORY);
    RETURN vHistory;
END GET_VERSION_HISTORY;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
END;
|
||||
/
|
||||
@@ -0,0 +1,163 @@
|
||||
create or replace PACKAGE CT_MRDS.DATA_EXPORTER
AUTHID CURRENT_USER
AS
/**
 * Data Export Package: Provides comprehensive data export capabilities to various formats (CSV, Parquet)
 * with support for cloud storage integration via Oracle Cloud Infrastructure (OCI).
 * The structure of comment is used by GET_PACKAGE_DOCUMENTATION function
 * which returns documentation text for confluence page (to Copy-Paste it).
 **/

    -- Package Version Information
    -- NOTE(review): bumped to 2.1.1 per the MARS-826-PREHOOK install script
    -- ("Version: 2.1.0 -> 2.1.1 (PATCH)"); confirm the intended build timestamp.
    PACKAGE_VERSION     CONSTANT VARCHAR2(10) := '2.1.1';
    PACKAGE_BUILD_DATE  CONSTANT VARCHAR2(19) := '2025-10-22 15:00:00';
    PACKAGE_AUTHOR      CONSTANT VARCHAR2(50) := 'MRDS Development Team';

    -- Version History (last 3-5 changes)
    VERSION_HISTORY CONSTANT VARCHAR2(4000) :=
        'v2.1.1 (2025-10-22): Patch release - updated package version and build date' || CHR(10) ||
        'v2.1.0 (2025-10-22): Added version tracking and PARTITION_YEAR/PARTITION_MONTH support' || CHR(10) ||
        'v2.0.0 (2025-10-01): Separated export functionality from FILE_MANAGER package' || CHR(10) ||
        'v1.0.0 (2025-09-15): Initial implementation within FILE_MANAGER package' || CHR(10);

    -- Line break (CR+LF) used when composing multi-line messages
    cgBL CONSTANT VARCHAR2(2) := CHR(13)||CHR(10);
    -- Scratch buffer for building error/log messages in package procedures
    vgMsgTmp VARCHAR2(32000);

    ---------------------------------------------------------------------------------------------------------------------------
    ---------------------------------------------------------------------------------------------------------------------------

    /**
     * @name EXPORT_TABLE_DATA
     * @desc Wrapper procedure for DBMS_CLOUD.EXPORT_DATA.
     *       Exports data into CSV file on OCI infrastructure.
     *       pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
     * @example
     * begin
     *   DATA_EXPORTER.EXPORT_TABLE_DATA(
     *     pSchemaName    => 'CT_MRDS',
     *     pTableName     => 'MY_TABLE',
     *     pKeyColumnName => 'A_WORKFLOW_HISTORY_KEY',
     *     pBucketArea    => 'DATA',
     *     pFolderName    => 'csv_exports'
     *   );
     * end;
     **/
    PROCEDURE EXPORT_TABLE_DATA (
        pSchemaName       IN VARCHAR2,
        pTableName        IN VARCHAR2,
        pKeyColumnName    IN VARCHAR2,
        pBucketArea       IN VARCHAR2,
        pFolderName       IN VARCHAR2,
        pCredentialName   IN VARCHAR2 default ENV_MANAGER.gvCredentialName
    );


    /**
     * @name EXPORT_TABLE_DATA_BY_DATE
     * @desc Wrapper procedure for DBMS_CLOUD.EXPORT_DATA.
     *       Exports data into PARQUET files on OCI infrastructure.
     *       Each YEAR_MONTH pair goes to a separate file (implicit partitioning).
     *       Allows specifying custom column list or uses T.* if pColumnList is NULL.
     *       Validates that all columns in pColumnList exist in the target table.
     *       Automatically adds 'T.' prefix to column names in pColumnList.
     *       pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
     * @example
     * begin
     *   DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
     *     pSchemaName    => 'CT_MRDS',
     *     pTableName     => 'MY_TABLE',
     *     pKeyColumnName => 'A_WORKFLOW_HISTORY_KEY',
     *     pBucketArea    => 'DATA',
     *     pFolderName    => 'parquet_exports',
     *     pColumnList    => 'COLUMN1, COLUMN2, COLUMN3', -- Optional
     *     pMinDate       => DATE '2024-01-01',
     *     pMaxDate       => SYSDATE
     *   );
     * end;
     **/
    PROCEDURE EXPORT_TABLE_DATA_BY_DATE (
        pSchemaName       IN VARCHAR2,
        pTableName        IN VARCHAR2,
        pKeyColumnName    IN VARCHAR2,
        pBucketArea       IN VARCHAR2,
        pFolderName       IN VARCHAR2,
        pColumnList       IN VARCHAR2 default NULL,
        pMinDate          IN DATE default DATE '1900-01-01',
        pMaxDate          IN DATE default SYSDATE,
        pCredentialName   IN VARCHAR2 default ENV_MANAGER.gvCredentialName
    );


    /**
     * @name EXPORT_TABLE_DATA_TO_CSV_BY_DATE
     * @desc Exports data to separate CSV files partitioned by year and month.
     *       Creates one CSV file for each year/month combination found in the data.
     *       Uses the same date filtering mechanism with CT_ODS.A_LOAD_HISTORY as EXPORT_TABLE_DATA_BY_DATE,
     *       but exports to CSV format instead of Parquet.
     *       File naming pattern: {pFileName}_YYYYMM.csv or {TABLENAME}_YYYYMM.csv (if pFileName is NULL)
     *       pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
     * @example
     * begin
     *   -- With custom filename
     *   DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
     *     pSchemaName    => 'CT_MRDS',
     *     pTableName     => 'MY_TABLE',
     *     pKeyColumnName => 'A_WORKFLOW_HISTORY_KEY',
     *     pBucketArea    => 'DATA',
     *     pFolderName    => 'exports',
     *     pFileName      => 'my_export.csv',
     *     pMinDate       => DATE '2024-01-01',
     *     pMaxDate       => SYSDATE
     *   );
     *
     *   -- With auto-generated filename (based on table name only)
     *   DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
     *     pSchemaName    => 'OU_TOP',
     *     pTableName     => 'AGGREGATED_ALLOTMENT',
     *     pKeyColumnName => 'A_WORKFLOW_HISTORY_KEY',
     *     pBucketArea    => 'ARCHIVE',
     *     pFolderName    => 'exports',
     *     pMinDate       => DATE '2025-09-01',
     *     pMaxDate       => DATE '2025-09-17'
     *   );
     *   -- This will create files like: AGGREGATED_ALLOTMENT_202509.csv, etc.
     * end;
     **/
    PROCEDURE EXPORT_TABLE_DATA_TO_CSV_BY_DATE (
        pSchemaName       IN VARCHAR2,
        pTableName        IN VARCHAR2,
        pKeyColumnName    IN VARCHAR2,
        pBucketArea       IN VARCHAR2,
        pFolderName       IN VARCHAR2,
        pFileName         IN VARCHAR2 DEFAULT NULL,
        pColumnList       IN VARCHAR2 default NULL,
        pMinDate          IN DATE default DATE '1900-01-01',
        pMaxDate          IN DATE default SYSDATE,
        pCredentialName   IN VARCHAR2 default ENV_MANAGER.gvCredentialName
    );

    ---------------------------------------------------------------------------------------------------------------------------
    -- VERSION MANAGEMENT FUNCTIONS
    ---------------------------------------------------------------------------------------------------------------------------

    /**
     * Returns the current package version number
     * return: Version string in format X.Y.Z (e.g., '2.1.1')
     **/
    FUNCTION GET_VERSION RETURN VARCHAR2;

    /**
     * Returns comprehensive build information including version, date, and author
     * return: Formatted string with complete build details
     **/
    FUNCTION GET_BUILD_INFO RETURN VARCHAR2;

    /**
     * Returns the version history with recent changes
     * return: Multi-line string with version history
     **/
    FUNCTION GET_VERSION_HISTORY RETURN VARCHAR2;

END;
/
|
||||
@@ -0,0 +1,733 @@
|
||||
create or replace PACKAGE BODY CT_MRDS.DATA_EXPORTER
|
||||
AS
|
||||
|
||||
-- Internal shared function: adds the 'T.' alias prefix to every column in a
-- comma-separated list and aliases the key column as A_WORKFLOW_HISTORY_KEY.
-- When pColumnList is NULL, the full column list is built from the data
-- dictionary (pTableName/pSchemaName are expected in uppercase).
FUNCTION processColumnList(pColumnList IN VARCHAR2, pTableName IN VARCHAR2, pSchemaName IN VARCHAR2, pKeyColumnName IN VARCHAR2) RETURN VARCHAR2 IS
    vResult     VARCHAR2(32767);
    vColumns    VARCHAR2(32767);
    vPos        PLS_INTEGER;
    vNextPos    PLS_INTEGER;
    vCurrentCol VARCHAR2(128);
BEGIN
    IF pColumnList IS NULL THEN
        -- Build the full column list straight from the dictionary, aliasing the
        -- key column in the same pass. A string REPLACE of 'T.'||pKeyColumnName
        -- on the assembled list (the previous approach) would also corrupt any
        -- column whose name merely starts with the key column name.
        SELECT LISTAGG(
                   CASE
                       WHEN column_name = pKeyColumnName
                           THEN 'T.' || column_name || ' AS A_WORKFLOW_HISTORY_KEY'
                       ELSE 'T.' || column_name
                   END, ', ') WITHIN GROUP (ORDER BY column_id)
        INTO vResult
        FROM all_tab_columns
        WHERE table_name = pTableName
          AND owner      = pSchemaName;

        RETURN vResult;
    END IF;

    -- Remove extra spaces and convert to uppercase for uniform parsing
    vColumns := UPPER(REPLACE(pColumnList, ' ', ''));
    vPos     := 1;
    vResult  := '';

    -- Parse the comma-separated column list and add the T. prefix
    WHILE vPos <= LENGTH(vColumns) LOOP
        vNextPos := INSTR(vColumns, ',', vPos);
        IF vNextPos = 0 THEN
            vNextPos := LENGTH(vColumns) + 1;
        END IF;

        vCurrentCol := SUBSTR(vColumns, vPos, vNextPos - vPos);

        -- Alias the key column (or the legacy A_ETL_LOAD_SET_KEY name) as
        -- A_WORKFLOW_HISTORY_KEY so downstream consumers see a uniform key.
        IF UPPER(vCurrentCol) = UPPER(pKeyColumnName) THEN
            vCurrentCol := 'T.' || pKeyColumnName || ' AS A_WORKFLOW_HISTORY_KEY';
        ELSIF UPPER(vCurrentCol) = 'A_ETL_LOAD_SET_KEY' THEN
            vCurrentCol := 'T.A_ETL_LOAD_SET_KEY AS A_WORKFLOW_HISTORY_KEY';
        ELSE
            -- Only prefix unqualified names; 'X.COL' entries are kept as-is
            IF INSTR(vCurrentCol, '.') = 0 THEN
                vCurrentCol := 'T.' || vCurrentCol;
            END IF;
        END IF;

        -- Append to the result with a comma separator
        IF vResult IS NOT NULL THEN
            vResult := vResult || ', ';
        END IF;
        vResult := vResult || vCurrentCol;

        vPos := vNextPos + 1;
    END LOOP;

    RETURN vResult;
END processColumnList;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
PROCEDURE EXPORT_TABLE_DATA (
|
||||
pSchemaName IN VARCHAR2,
|
||||
pTableName IN VARCHAR2,
|
||||
pKeyColumnName IN VARCHAR2,
|
||||
pBucketArea IN VARCHAR2,
|
||||
pFolderName IN VARCHAR2,
|
||||
pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
|
||||
)
|
||||
IS
|
||||
-- Type definition for key values
|
||||
TYPE key_value_tab IS TABLE OF VARCHAR2(4000);
|
||||
vKeyValues key_value_tab;
|
||||
vCount INTEGER;
|
||||
vSql VARCHAR2(4000);
|
||||
vKeyValue VARCHAR2(4000);
|
||||
vQuery VARCHAR2(32767);
|
||||
vUri VARCHAR2(4000);
|
||||
vDataType VARCHAR2(30);
|
||||
vTableName VARCHAR2(128);
|
||||
vSchemaName VARCHAR2(128);
|
||||
vKeyColumnName VARCHAR2(128);
|
||||
vParameters VARCHAR2(4000);
|
||||
vBucketUri VARCHAR2(4000);
|
||||
vProcessedColumnList VARCHAR2(32767);
|
||||
vCurrentCol VARCHAR2(128);
|
||||
vAllColumnsList VARCHAR2(32767);
|
||||
|
||||
|
||||
-- Function to sanitize file names
|
||||
FUNCTION sanitizeFilename(pFilename IN VARCHAR2) RETURN VARCHAR2 IS
|
||||
vFilename VARCHAR2(1000);
|
||||
BEGIN
|
||||
-- Replace any disallowed characters with underscores
|
||||
vFilename := REGEXP_REPLACE(pFilename, '[^a-zA-Z0-9._-]', '_');
|
||||
RETURN vFilename;
|
||||
END sanitizeFilename;
|
||||
|
||||
BEGIN
|
||||
vParameters := ENV_MANAGER.FORMAT_PARAMETERS(SYS.ODCIVARCHAR2LIST( 'pSchemaName => '''||nvl(pSchemaName, 'NULL')||''''
|
||||
,'pTableName => '''||nvl(pTableName, 'NULL')||''''
|
||||
,'pKeyColumnName => '''||nvl(pKeyColumnName, 'NULL')||''''
|
||||
,'pBucketArea => '''||nvl(pBucketArea, 'NULL')||''''
|
||||
,'pFolderName => '''||nvl(pFolderName, 'NULL')||''''
|
||||
,'pCredentialName => '''||nvl(pCredentialName, 'NULL')||''''
|
||||
));
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT('Start','INFO', vParameters);
|
||||
|
||||
-- Get bucket URI based on bucket area using FILE_MANAGER function
|
||||
vBucketUri := FILE_MANAGER.GET_BUCKET_URI(pBucketArea);
|
||||
|
||||
-- Convert table and column names to uppercase to match data dictionary
|
||||
vTableName := UPPER(pTableName);
|
||||
vSchemaName := UPPER(pSchemaName);
|
||||
vKeyColumnName := UPPER(pKeyColumnName);
|
||||
|
||||
-- Check if table exists
|
||||
SELECT COUNT(*) INTO vCount
|
||||
FROM all_tables
|
||||
WHERE table_name = vTableName
|
||||
AND owner = vSchemaName;
|
||||
|
||||
IF vCount = 0 THEN
|
||||
RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_TABLE_NOT_EXISTS, ENV_MANAGER.MSG_TABLE_NOT_EXISTS);
|
||||
END IF;
|
||||
|
||||
-- Check if key column exists
|
||||
SELECT COUNT(*) INTO vCount
|
||||
FROM all_tab_columns
|
||||
WHERE table_name = vTableName
|
||||
AND column_name = vKeyColumnName
|
||||
AND owner = vSchemaName;
|
||||
|
||||
IF vCount = 0 THEN
|
||||
RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, ENV_MANAGER.MSG_COLUMN_NOT_EXISTS);
|
||||
|
||||
END IF;
|
||||
|
||||
-- Get the data type of the key column
|
||||
SELECT data_type INTO vDataType
|
||||
FROM all_tab_columns
|
||||
WHERE table_name = vTableName
|
||||
AND column_name = vKeyColumnName
|
||||
AND owner = vSchemaName;
|
||||
|
||||
-- Build list of all columns for the table (excluding key column to avoid duplication)
|
||||
SELECT LISTAGG(column_name, ', ') WITHIN GROUP (ORDER BY column_id)
|
||||
INTO vAllColumnsList
|
||||
FROM all_tab_columns
|
||||
WHERE table_name = vTableName
|
||||
AND owner = vSchemaName
|
||||
AND column_name != vKeyColumnName;
|
||||
|
||||
-- Process column list to add T. prefix to each column
|
||||
vProcessedColumnList := processColumnList(vAllColumnsList, vTableName, vSchemaName, vKeyColumnName);
|
||||
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT('Dynamic column list built (excluding key): ' || vAllColumnsList, 'DEBUG', vParameters);
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT('Processed column list with T. prefix: ' || vProcessedColumnList, 'DEBUG', vParameters);
|
||||
|
||||
vTableName := DBMS_ASSERT.SCHEMA_NAME(vSchemaName) || '.' || DBMS_ASSERT.simple_sql_name(vTableName);
|
||||
-- Fetch unique key values from A_LOAD_HISTORY
|
||||
vSql := 'SELECT DISTINCT L.A_ETL_LOAD_SET_KEY' ||
|
||||
' FROM ' || vTableName || ' T, CT_ODS.A_LOAD_HISTORY L' ||
|
||||
' WHERE T.' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = L.A_ETL_LOAD_SET_KEY';
|
||||
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT('Executing key values query: ' || vSql, 'DEBUG', vParameters);
|
||||
EXECUTE IMMEDIATE vSql BULK COLLECT INTO vKeyValues;
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT('Found ' || vKeyValues.COUNT || ' unique key values to process', 'DEBUG', vParameters);
|
||||
|
||||
-- Loop over each unique key value
|
||||
FOR i IN 1 .. vKeyValues.COUNT LOOP
|
||||
vKeyValue := vKeyValues(i);
|
||||
|
||||
-- Construct the query to extract data for the current key value with A_WORKFLOW_HISTORY_KEY mapping
|
||||
IF vDataType IN ('VARCHAR2', 'CHAR', 'NCHAR', 'NVARCHAR2') THEN
|
||||
vQuery := 'SELECT ' || vProcessedColumnList ||
|
||||
' FROM ' || vTableName || ' T, CT_ODS.A_LOAD_HISTORY L' ||
|
||||
' WHERE T.' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = L.A_ETL_LOAD_SET_KEY' ||
|
||||
' AND L.A_ETL_LOAD_SET_KEY = ' || CHR(39) || vKeyValue || CHR(39);
|
||||
ELSIF vDataType IN ('NUMBER', 'FLOAT', 'BINARY_FLOAT', 'BINARY_DOUBLE') THEN
|
||||
vQuery := 'SELECT ' || vProcessedColumnList ||
|
||||
' FROM ' || vTableName || ' T, CT_ODS.A_LOAD_HISTORY L' ||
|
||||
' WHERE T.' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = L.A_ETL_LOAD_SET_KEY' ||
|
||||
' AND L.A_ETL_LOAD_SET_KEY = ' || vKeyValue;
|
||||
ELSIF vDataType LIKE 'TIMESTAMP%' OR vDataType = 'DATE' THEN
|
||||
vQuery := 'SELECT ' || vProcessedColumnList ||
|
||||
' FROM ' || vTableName || ' T, CT_ODS.A_LOAD_HISTORY L' ||
|
||||
' WHERE T.' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = L.A_ETL_LOAD_SET_KEY' ||
|
||||
' AND L.A_ETL_LOAD_SET_KEY = TO_TIMESTAMP(' || CHR(39) || vKeyValue || CHR(39) ||', ''YYYY-MM-DD HH24:MI:SS.FF'')';
|
||||
ELSE
|
||||
RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_UNSUPPORTED_DATA_TYPE, ENV_MANAGER.MSG_UNSUPPORTED_DATA_TYPE);
|
||||
END IF;
|
||||
|
||||
-- Construct the URI for the file in OCI Object Storage
|
||||
vUri := vBucketUri ||
|
||||
CASE WHEN pFolderName IS NOT NULL THEN pFolderName || '/' ELSE '' END ||
|
||||
sanitizeFilename(vKeyValue) || '.csv';
|
||||
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT('Processing key value: ' || vKeyValue || ' (' || (i) || '/' || vKeyValues.COUNT || ')', 'DEBUG', vParameters);
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT('Export query: ' || vQuery, 'DEBUG', vParameters);
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT('Export URI: ' || vUri, 'DEBUG', vParameters);
|
||||
|
||||
-- Use DBMS_CLOUD package to export data to the URI
|
||||
DBMS_CLOUD.EXPORT_DATA(
|
||||
credential_name => pCredentialName,
|
||||
file_uri_list => vUri,
|
||||
query => vQuery,
|
||||
format => json_object('type' VALUE 'CSV', 'header' VALUE true)
|
||||
);
|
||||
END LOOP;
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT('End','INFO',vParameters);
|
||||
EXCEPTION
|
||||
WHEN ENV_MANAGER.ERR_TABLE_NOT_EXISTS THEN
|
||||
vgMsgTmp := ENV_MANAGER.MSG_TABLE_NOT_EXISTS ||': '||vTableName;
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
|
||||
RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_TABLE_NOT_EXISTS, vgMsgTmp);
|
||||
WHEN ENV_MANAGER.ERR_COLUMN_NOT_EXISTS THEN
|
||||
vgMsgTmp := ENV_MANAGER.MSG_COLUMN_NOT_EXISTS || ' (TableName.ColumnName): ' || vTableName||'.'||vKeyColumnName||CASE WHEN vCurrentCol IS NOT NULL THEN '.'||vCurrentCol||' in column list' ELSE '' END;
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
|
||||
RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, vgMsgTmp);
|
||||
WHEN ENV_MANAGER.ERR_UNSUPPORTED_DATA_TYPE THEN
|
||||
vgMsgTmp := ENV_MANAGER.MSG_UNSUPPORTED_DATA_TYPE || ' vDataType: '||vDataType;
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
|
||||
RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_UNSUPPORTED_DATA_TYPE, vgMsgTmp);
|
||||
WHEN OTHERS THEN
|
||||
-- Log complete error details including full stack trace and backtrace
|
||||
ENV_MANAGER.LOG_PROCESS_ERROR('Export failed: ' || SQLERRM, vParameters, 'DATA_EXPORTER');
|
||||
ENV_MANAGER.LOG_PROCESS_EVENT(ENV_MANAGER.GET_ERROR_STACK(pFormat => 'TABLE', pCode=> SQLCODE), 'ERROR', vParameters);
|
||||
RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_UNKNOWN, ENV_MANAGER.GET_ERROR_STACK(pFormat => 'OUTPUT', pCode=> SQLCODE));
|
||||
|
||||
END EXPORT_TABLE_DATA;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
/**
* EXPORT_TABLE_DATA_BY_DATE
* Exports rows of pSchemaName.pTableName joined to CT_ODS.A_LOAD_HISTORY into
* Parquet files on OCI object storage — one file per distinct LOAD_START
* year/month combination inside [pMinDate, pMaxDate).
* Output layout: {bucket}/{pFolderName}/PARTITION_YEAR=YYYY/PARTITION_MONTH=MM/YYYYMM.parquet
* Raises CODE_TABLE_NOT_EXISTS / CODE_COLUMN_NOT_EXISTS for bad identifiers.
*
* Review fixes vs. previous revision:
*  - removed unused local vDataType (never read in this procedure);
*  - the pColumnList validation block no longer re-declares vCurrentCol /
*    vColumnName: the inner vCurrentCol shadowed the outer one, so the
*    ERR_COLUMN_NOT_EXISTS handler could never report WHICH entry of
*    pColumnList was invalid.
**/
PROCEDURE EXPORT_TABLE_DATA_BY_DATE (
    pSchemaName     IN VARCHAR2,
    pTableName      IN VARCHAR2,
    pKeyColumnName  IN VARCHAR2,
    pBucketArea     IN VARCHAR2,
    pFolderName     IN VARCHAR2,
    pColumnList     IN VARCHAR2 default NULL,
    pMinDate        IN DATE     default DATE '1900-01-01',
    pMaxDate        IN DATE     default SYSDATE,
    pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
)
IS
    -- Collection type for the year/month columns of the date-range query
    TYPE key_value_tab IS TABLE OF VARCHAR2(4000);

    vKeyValuesYear       key_value_tab;   -- distinct years  (TO_CHAR 'YYYY')
    vKeyValuesMonth      key_value_tab;   -- distinct months (TO_CHAR 'MM'), parallel to vKeyValuesYear

    vCount               INTEGER;         -- existence-check counter
    vSql                 VARCHAR2(32000); -- dynamic year/month range query
    vKeyValueYear        VARCHAR2(4000);
    vKeyValueMonth       VARCHAR2(4000);
    vQuery               VARCHAR2(32767); -- per-partition export query
    vUri                 VARCHAR2(4000);  -- target object-storage URI
    vTableName           VARCHAR2(128);
    vSchemaName          VARCHAR2(128);
    vKeyColumnName       VARCHAR2(128);
    vParameters          CT_MRDS.A_PROCESS_LOG.PROCEDURE_PARAMETERS%TYPE; -- formatted call parameters for logging
    vProcessedColumnList VARCHAR2(32767); -- column list with T. prefix / key-column alias
    vBucketUri           VARCHAR2(4000);
    vCurrentCol          VARCHAR2(128);   -- column currently being validated; read by the exception handler

    -- Replaces characters outside [a-zA-Z0-9._-] with '_' so values are safe in object names
    FUNCTION sanitizeFilename(pFilename IN VARCHAR2) RETURN VARCHAR2 IS
        vFilename VARCHAR2(1000);
    BEGIN
        vFilename := REGEXP_REPLACE(pFilename, '[^a-zA-Z0-9._-]', '_');
        RETURN vFilename;
    END sanitizeFilename;

BEGIN
    vParameters := ENV_MANAGER.FORMAT_PARAMETERS(SYS.ODCIVARCHAR2LIST( 'pSchemaName => '''||nvl(pSchemaName, 'NULL')||''''
                                                                      ,'pTableName => '''||nvl(pTableName, 'NULL')||''''
                                                                      ,'pKeyColumnName => '''||nvl(pKeyColumnName, 'NULL')||''''
                                                                      ,'pBucketArea => '''||nvl(pBucketArea, 'NULL')||''''
                                                                      ,'pFolderName => '''||nvl(pFolderName, 'NULL')||''''
                                                                      ,'pColumnList => '''||nvl(pColumnList, 'NULL')||''''
                                                                      ,'pMinDate => '''||nvl(TO_CHAR(pMinDate, 'YYYY-MM-DD HH24:MI:SS'), 'NULL')||''''
                                                                      ,'pMaxDate => '''||nvl(TO_CHAR(pMaxDate, 'YYYY-MM-DD HH24:MI:SS'), 'NULL')||''''
                                                                      ,'pCredentialName => '''||nvl(pCredentialName, 'NULL')||''''
                                                                      ));
    ENV_MANAGER.LOG_PROCESS_EVENT('Start','INFO', vParameters);

    -- Resolve the object-storage bucket URI for the requested area
    vBucketUri := FILE_MANAGER.GET_BUCKET_URI(pBucketArea);

    -- Uppercase identifiers to match the (unquoted) data-dictionary entries
    vTableName     := UPPER(pTableName);
    vSchemaName    := UPPER(pSchemaName);
    vKeyColumnName := UPPER(pKeyColumnName);

    -- Check if table exists
    SELECT COUNT(*) INTO vCount
    FROM all_tables
    WHERE table_name = vTableName
    AND owner = vSchemaName;

    IF vCount = 0 THEN
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_TABLE_NOT_EXISTS, ENV_MANAGER.MSG_TABLE_NOT_EXISTS);
    END IF;

    -- Check if key column exists
    SELECT COUNT(*) INTO vCount
    FROM all_tab_columns
    WHERE table_name = vTableName
    AND column_name = vKeyColumnName
    AND owner = vSchemaName;

    IF vCount = 0 THEN
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, ENV_MANAGER.MSG_COLUMN_NOT_EXISTS);
    END IF;

    -- Validate pColumnList: every entry must be an existing column of the table.
    -- Uses the OUTER vCurrentCol (no shadowing) so the ERR_COLUMN_NOT_EXISTS
    -- handler can report the offending column name.
    IF pColumnList IS NOT NULL THEN
        DECLARE
            vColumns VARCHAR2(32767);
            vPos     PLS_INTEGER;
            vNextPos PLS_INTEGER;
        BEGIN
            -- Remove spaces and convert to uppercase for processing
            vColumns := UPPER(REPLACE(pColumnList, ' ', ''));
            vPos := 1;

            -- Parse comma-separated column list
            WHILE vPos <= LENGTH(vColumns) LOOP
                vNextPos := INSTR(vColumns, ',', vPos);
                IF vNextPos = 0 THEN
                    vNextPos := LENGTH(vColumns) + 1;
                END IF;

                vCurrentCol := SUBSTR(vColumns, vPos, vNextPos - vPos);

                -- Strip a table-alias prefix if present ('T.COLUMN_NAME' -> 'COLUMN_NAME')
                IF INSTR(vCurrentCol, '.') > 0 THEN
                    vCurrentCol := SUBSTR(vCurrentCol, INSTR(vCurrentCol, '.') + 1);
                END IF;

                -- Check if column exists in the table
                SELECT COUNT(*) INTO vCount
                FROM all_tab_columns
                WHERE table_name = vTableName
                AND column_name = vCurrentCol
                AND owner = vSchemaName;

                IF vCount = 0 THEN
                    RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, ENV_MANAGER.MSG_COLUMN_NOT_EXISTS);
                END IF;

                vPos := vNextPos + 1;
            END LOOP;
        END;
    END IF;

    -- Add T. prefix to each column (builds the full list from metadata when pColumnList is NULL)
    vProcessedColumnList := processColumnList(pColumnList, vTableName, vSchemaName, vKeyColumnName);

    ENV_MANAGER.LOG_PROCESS_EVENT('Input column list: ' || NVL(pColumnList, 'NULL (building dynamic list from table metadata)'), 'DEBUG', vParameters);
    ENV_MANAGER.LOG_PROCESS_EVENT('Processed column list: ' || vProcessedColumnList, 'DEBUG', vParameters);

    -- Safe, fully-qualified table name for use inside dynamic SQL
    vTableName := DBMS_ASSERT.SCHEMA_NAME(vSchemaName) || '.' || DBMS_ASSERT.simple_sql_name(vTableName);

    -- Fetch the distinct LOAD_START year/month pairs inside the date range
    vSql := 'SELECT DISTINCT TO_CHAR(L.LOAD_START,''YYYY'') AS YR, TO_CHAR(L.LOAD_START,''MM'') AS MN
    FROM ' || vTableName || ' T, CT_ODS.A_LOAD_HISTORY L
    WHERE T.' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = L.A_ETL_LOAD_SET_KEY
    AND L.LOAD_START >= :pMinDate
    AND L.LOAD_START < :pMaxDate
    ' ;
    ENV_MANAGER.LOG_PROCESS_EVENT('Executing date range query: ' || vSql, 'DEBUG', vParameters);
    EXECUTE IMMEDIATE vSql BULK COLLECT INTO vKeyValuesYear, vKeyValuesMonth USING pMinDate, pMaxDate;
    ENV_MANAGER.LOG_PROCESS_EVENT('Found ' || vKeyValuesYear.COUNT || ' year/month combinations to export', 'DEBUG', vParameters);

    -- One Parquet file per year/month combination
    FOR i IN 1 .. vKeyValuesYear.COUNT LOOP
        vKeyValueYear  := vKeyValuesYear(i);
        vKeyValueMonth := vKeyValuesMonth(i);

        ENV_MANAGER.LOG_PROCESS_EVENT('Processing Year/Month: ' || vKeyValueYear || '/' || vKeyValueMonth || ' (' || i || '/' || vKeyValuesYear.COUNT || ')', 'DEBUG', vParameters);

        -- Export query for the current partition.
        -- Note: processColumnList already handles A_WORKFLOW_HISTORY_KEY aliasing.
        vQuery := 'SELECT ' || vProcessedColumnList || '
        FROM ' || vTableName || ' T, CT_ODS.A_LOAD_HISTORY L
        WHERE T.' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = L.A_ETL_LOAD_SET_KEY
        AND TO_CHAR(L.LOAD_START,''YYYY'') = ' || CHR(39) || vKeyValueYear || CHR(39) || '
        AND TO_CHAR(L.LOAD_START,''MM'') = ' || CHR(39) || vKeyValueMonth || CHR(39) || '
        AND L.LOAD_START >= TO_DATE(' || CHR(39) || TO_CHAR(pMinDate, 'YYYY-MM-DD HH24:MI:SS') || CHR(39) || ', ''YYYY-MM-DD HH24:MI:SS'')
        AND L.LOAD_START < TO_DATE(' || CHR(39) || TO_CHAR(pMaxDate, 'YYYY-MM-DD HH24:MI:SS') || CHR(39) || ', ''YYYY-MM-DD HH24:MI:SS'')';

        -- Hive-style partition layout in OCI Object Storage
        vUri := vBucketUri ||
                CASE WHEN pFolderName IS NOT NULL THEN pFolderName || '/' ELSE '' END ||
                'PARTITION_YEAR=' || sanitizeFilename(vKeyValueYear) || '/' ||
                'PARTITION_MONTH=' || sanitizeFilename(vKeyValueMonth) || '/' ||
                sanitizeFilename(vKeyValueYear) || sanitizeFilename(vKeyValueMonth) || '.parquet';

        ENV_MANAGER.LOG_PROCESS_EVENT('Export query: ' || vQuery, 'DEBUG', vParameters);
        ENV_MANAGER.LOG_PROCESS_EVENT('Parquet export URI: ' || vUri, 'DEBUG', vParameters);

        -- Use DBMS_CLOUD package to export data to the URI
        DBMS_CLOUD.EXPORT_DATA(
            credential_name => pCredentialName,
            file_uri_list   => vUri,
            query           => vQuery,
            format          => json_object('type' VALUE 'parquet')
        );
    END LOOP;

    ENV_MANAGER.LOG_PROCESS_EVENT('End','INFO',vParameters);
EXCEPTION
    WHEN ENV_MANAGER.ERR_TABLE_NOT_EXISTS THEN
        vgMsgTmp := ENV_MANAGER.MSG_TABLE_NOT_EXISTS ||': '||vTableName;
        ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_TABLE_NOT_EXISTS, vgMsgTmp);
    WHEN ENV_MANAGER.ERR_COLUMN_NOT_EXISTS THEN
        vgMsgTmp := ENV_MANAGER.MSG_COLUMN_NOT_EXISTS || ' (TableName.ColumnName): ' || vTableName||'.'||vKeyColumnName||CASE WHEN vCurrentCol IS NOT NULL THEN '.'||vCurrentCol||' in pColumnList' ELSE '' END;
        ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, vgMsgTmp);
    WHEN OTHERS THEN
        -- Log complete error details including full stack trace and backtrace
        ENV_MANAGER.LOG_PROCESS_ERROR('Export failed: ' || SQLERRM, vParameters, 'DATA_EXPORTER');
        ENV_MANAGER.LOG_PROCESS_EVENT(ENV_MANAGER.GET_ERROR_STACK(pFormat => 'TABLE', pCode=> SQLCODE), 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_UNKNOWN, ENV_MANAGER.GET_ERROR_STACK(pFormat => 'OUTPUT', pCode=> SQLCODE));

END EXPORT_TABLE_DATA_BY_DATE;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
/**
* @name EXPORT_TABLE_DATA_TO_CSV_BY_DATE
* @desc Exports data to separate CSV files partitioned by year and month.
*       Creates one CSV file per year/month combination of LOAD_START found in
*       CT_ODS.A_LOAD_HISTORY within the given date range — the same date
*       filtering mechanism as EXPORT_TABLE_DATA_BY_DATE, but with CSV output
*       instead of Parquet.
*       File naming pattern: {pFileName}_YYYYMM.csv, or {TABLENAME}_YYYYMM.csv if pFileName is NULL.
*       Allows specifying a custom column list; builds the full column list from
*       table metadata when pColumnList is NULL.
*       Validates that all columns in pColumnList exist in the target table.
*       Automatically adds 'T.' prefix to column names in pColumnList.
* @example
* begin
*   DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
*     pSchemaName    => 'CT_MRDS',
*     pTableName     => 'MY_TABLE',
*     pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
*     pBucketArea    => 'DATA',
*     pFolderName    => 'exports',
*     pFileName      => 'my_export.csv',
*     pColumnList    => 'COLUMN1, COLUMN2, COLUMN3', -- Optional
*     pMinDate       => DATE '2024-01-01',
*     pMaxDate       => SYSDATE
*   );
* end;
**/
|
||||
/**
* EXPORT_TABLE_DATA_TO_CSV_BY_DATE
* Exports rows of pSchemaName.pTableName joined to CT_ODS.A_LOAD_HISTORY into
* CSV files on OCI object storage — one file per distinct LOAD_START year/month
* combination inside [pMinDate, pMaxDate).
* File name: {base}_{YYYY}{MM}{ext}, where base/ext come from pFileName or
* default to {TABLENAME} / '.csv'.
*
* Review fixes vs. previous revision:
*  - removed unused local vDataType and the dead 'SELECT data_type INTO vDataType'
*    query (its result was never read; column existence is already verified);
*  - the pColumnList validation block no longer re-declares vCurrentCol /
*    vColumnName: the inner vCurrentCol shadowed the outer one, so the
*    ERR_COLUMN_NOT_EXISTS handler could never report WHICH entry of
*    pColumnList was invalid.
**/
PROCEDURE EXPORT_TABLE_DATA_TO_CSV_BY_DATE (
    pSchemaName     IN VARCHAR2,
    pTableName      IN VARCHAR2,
    pKeyColumnName  IN VARCHAR2,
    pBucketArea     IN VARCHAR2,
    pFolderName     IN VARCHAR2,
    pFileName       IN VARCHAR2 DEFAULT NULL,
    pColumnList     IN VARCHAR2 default NULL,
    pMinDate        IN DATE     default DATE '1900-01-01',
    pMaxDate        IN DATE     default SYSDATE,
    pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
)
IS
    -- Collection type for the year/month columns of the date-range query
    TYPE key_value_tab IS TABLE OF VARCHAR2(4000);

    vKeyValuesYear       key_value_tab;   -- distinct years  (TO_CHAR 'YYYY')
    vKeyValuesMonth      key_value_tab;   -- distinct months (TO_CHAR 'MM'), parallel to vKeyValuesYear

    vCount               INTEGER;         -- existence-check counter
    vSql                 VARCHAR2(4000);  -- dynamic year/month range query
    vKeyValueYear        VARCHAR2(4000);
    vKeyValueMonth       VARCHAR2(4000);
    vQuery               VARCHAR2(32767); -- per-partition export query
    vUri                 VARCHAR2(4000);  -- target object-storage URI
    vTableName           VARCHAR2(128);
    vSchemaName          VARCHAR2(128);
    vKeyColumnName       VARCHAR2(128);
    vParameters          CT_MRDS.A_PROCESS_LOG.PROCEDURE_PARAMETERS%TYPE; -- formatted call parameters for logging
    vFileBaseName        VARCHAR2(4000);  -- file name without extension
    vFileExtension       VARCHAR2(10);    -- extension including the dot ('.csv' by default)
    vProcessedColumnList VARCHAR2(32767); -- column list with T. prefix / key-column alias
    vBucketUri           VARCHAR2(4000);
    vCurrentCol          VARCHAR2(128);   -- column currently being validated; read by the exception handler

    -- Replaces characters outside [a-zA-Z0-9._-] with '_' so values are safe in object names
    FUNCTION sanitizeFilename(pFilename IN VARCHAR2) RETURN VARCHAR2 IS
        vFilename VARCHAR2(1000);
    BEGIN
        vFilename := REGEXP_REPLACE(pFilename, '[^a-zA-Z0-9._-]', '_');
        RETURN vFilename;
    END sanitizeFilename;

BEGIN
    vParameters := ENV_MANAGER.FORMAT_PARAMETERS(SYS.ODCIVARCHAR2LIST( 'pSchemaName => '''||nvl(pSchemaName, 'NULL')||''''
                                                                      ,'pTableName => '''||nvl(pTableName, 'NULL')||''''
                                                                      ,'pKeyColumnName => '''||nvl(pKeyColumnName, 'NULL')||''''
                                                                      ,'pBucketArea => '''||nvl(pBucketArea, 'NULL')||''''
                                                                      ,'pFolderName => '''||nvl(pFolderName, 'NULL')||''''
                                                                      ,'pFileName => '''||nvl(pFileName, 'NULL')||''''
                                                                      ,'pColumnList => '''||nvl(pColumnList, 'NULL')||''''
                                                                      ,'pMinDate => '''||nvl(TO_CHAR(pMinDate, 'YYYY-MM-DD HH24:MI:SS'), 'NULL')||''''
                                                                      ,'pMaxDate => '''||nvl(TO_CHAR(pMaxDate, 'YYYY-MM-DD HH24:MI:SS'), 'NULL')||''''
                                                                      ,'pCredentialName => '''||nvl(pCredentialName, 'NULL')||''''
                                                                      ));
    ENV_MANAGER.LOG_PROCESS_EVENT('Start','INFO', vParameters);

    -- Resolve the object-storage bucket URI for the requested area
    vBucketUri := FILE_MANAGER.GET_BUCKET_URI(pBucketArea);

    -- Uppercase identifiers to match the (unquoted) data-dictionary entries
    vTableName     := UPPER(pTableName);
    vSchemaName    := UPPER(pSchemaName);
    vKeyColumnName := UPPER(pKeyColumnName);

    -- Extract base filename and extension or construct default filename
    IF pFileName IS NOT NULL THEN
        -- Use provided filename; split on the LAST dot
        IF INSTR(pFileName, '.') > 0 THEN
            vFileBaseName  := SUBSTR(pFileName, 1, INSTR(pFileName, '.', -1) - 1);
            vFileExtension := SUBSTR(pFileName, INSTR(pFileName, '.', -1));
        ELSE
            vFileBaseName  := pFileName;
            vFileExtension := '.csv';
        END IF;
    ELSE
        -- Construct default filename: TABLENAME.csv (without date range)
        vFileBaseName  := UPPER(pTableName);
        vFileExtension := '.csv';
    END IF;

    -- Check if table exists
    SELECT COUNT(*) INTO vCount
    FROM all_tables
    WHERE table_name = vTableName
    AND owner = vSchemaName;

    IF vCount = 0 THEN
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_TABLE_NOT_EXISTS, ENV_MANAGER.MSG_TABLE_NOT_EXISTS);
    END IF;

    -- Check if key column exists
    SELECT COUNT(*) INTO vCount
    FROM all_tab_columns
    WHERE table_name = vTableName
    AND column_name = vKeyColumnName
    AND owner = vSchemaName;

    IF vCount = 0 THEN
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, ENV_MANAGER.MSG_COLUMN_NOT_EXISTS);
    END IF;

    -- Validate pColumnList: every entry must be an existing column of the table.
    -- Uses the OUTER vCurrentCol (no shadowing) so the ERR_COLUMN_NOT_EXISTS
    -- handler can report the offending column name.
    IF pColumnList IS NOT NULL THEN
        DECLARE
            vColumns VARCHAR2(32767);
            vPos     PLS_INTEGER;
            vNextPos PLS_INTEGER;
        BEGIN
            -- Remove spaces and convert to uppercase for processing
            vColumns := UPPER(REPLACE(pColumnList, ' ', ''));
            vPos := 1;

            -- Parse comma-separated column list
            WHILE vPos <= LENGTH(vColumns) LOOP
                vNextPos := INSTR(vColumns, ',', vPos);
                IF vNextPos = 0 THEN
                    vNextPos := LENGTH(vColumns) + 1;
                END IF;

                vCurrentCol := SUBSTR(vColumns, vPos, vNextPos - vPos);

                -- Strip a table-alias prefix if present ('T.COLUMN_NAME' -> 'COLUMN_NAME')
                IF INSTR(vCurrentCol, '.') > 0 THEN
                    vCurrentCol := SUBSTR(vCurrentCol, INSTR(vCurrentCol, '.') + 1);
                END IF;

                -- Check if column exists in the table
                SELECT COUNT(*) INTO vCount
                FROM all_tab_columns
                WHERE table_name = vTableName
                AND column_name = vCurrentCol
                AND owner = vSchemaName;

                IF vCount = 0 THEN
                    RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, ENV_MANAGER.MSG_COLUMN_NOT_EXISTS);
                END IF;

                vPos := vNextPos + 1;
            END LOOP;
        END;
    END IF;

    -- Add T. prefix to each column (builds the full list from metadata when pColumnList is NULL)
    vProcessedColumnList := processColumnList(pColumnList, vTableName, vSchemaName, vKeyColumnName);

    ENV_MANAGER.LOG_PROCESS_EVENT('Input column list: ' || NVL(pColumnList, 'NULL (using dynamic column list)'), 'DEBUG', vParameters);
    ENV_MANAGER.LOG_PROCESS_EVENT('Processed column list: ' || vProcessedColumnList, 'DEBUG', vParameters);

    -- Safe, fully-qualified table name for use inside dynamic SQL
    vTableName := DBMS_ASSERT.SCHEMA_NAME(vSchemaName) || '.' || DBMS_ASSERT.simple_sql_name(vTableName);

    -- Fetch unique year/month combinations inside the date range
    vSql := 'SELECT DISTINCT TO_CHAR(L.LOAD_START,''YYYY'') AS YR, TO_CHAR(L.LOAD_START,''MM'') AS MN
    FROM ' || vTableName || ' T, CT_ODS.A_LOAD_HISTORY L
    WHERE T.' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = L.A_ETL_LOAD_SET_KEY
    AND L.LOAD_START >= :pMinDate
    AND L.LOAD_START < :pMaxDate
    ' ;
    ENV_MANAGER.LOG_PROCESS_EVENT('Executing date range query: ' || vSql, 'DEBUG', vParameters);
    EXECUTE IMMEDIATE vSql BULK COLLECT INTO vKeyValuesYear, vKeyValuesMonth USING pMinDate, pMaxDate;

    ENV_MANAGER.LOG_PROCESS_EVENT('Found ' || vKeyValuesYear.COUNT || ' year/month combinations to export', 'INFO', vParameters);
    ENV_MANAGER.LOG_PROCESS_EVENT('Date range: ' || TO_CHAR(pMinDate, 'YYYY-MM-DD HH24:MI:SS') || ' to ' || TO_CHAR(pMaxDate, 'YYYY-MM-DD HH24:MI:SS'), 'DEBUG', vParameters);

    -- One CSV file per year/month combination
    FOR i IN 1 .. vKeyValuesYear.COUNT LOOP
        vKeyValueYear  := vKeyValuesYear(i);
        vKeyValueMonth := vKeyValuesMonth(i);

        -- Export query for the current year/month
        vQuery := 'SELECT ' || vProcessedColumnList || '
        FROM ' || vTableName || ' T, CT_ODS.A_LOAD_HISTORY L
        WHERE T.' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = L.A_ETL_LOAD_SET_KEY
        AND TO_CHAR(L.LOAD_START,''YYYY'') = ' || CHR(39) || vKeyValueYear || CHR(39) || '
        AND TO_CHAR(L.LOAD_START,''MM'') = ' || CHR(39) || vKeyValueMonth || CHR(39) || '
        AND L.LOAD_START >= TO_DATE(' || CHR(39) || TO_CHAR(pMinDate, 'YYYY-MM-DD HH24:MI:SS') || CHR(39) || ', ''YYYY-MM-DD HH24:MI:SS'')
        AND L.LOAD_START < TO_DATE(' || CHR(39) || TO_CHAR(pMaxDate, 'YYYY-MM-DD HH24:MI:SS') || CHR(39) || ', ''YYYY-MM-DD HH24:MI:SS'')';

        -- Target: {bucket}/{folder}/{base}_{YYYY}{MM}{ext}
        vUri := vBucketUri ||
                CASE WHEN pFolderName IS NOT NULL THEN pFolderName || '/' ELSE '' END ||
                sanitizeFilename(vFileBaseName) || '_' ||
                sanitizeFilename(vKeyValueYear) || sanitizeFilename(vKeyValueMonth) ||
                vFileExtension;

        ENV_MANAGER.LOG_PROCESS_EVENT('Exporting to CSV file: ' || vUri, 'INFO', vParameters);
        ENV_MANAGER.LOG_PROCESS_EVENT('Processing Year/Month: ' || vKeyValueYear || '/' || vKeyValueMonth || ' (' || i || '/' || vKeyValuesYear.COUNT || ')', 'DEBUG', vParameters);
        ENV_MANAGER.LOG_PROCESS_EVENT('Export query: ' || vQuery, 'DEBUG', vParameters);
        ENV_MANAGER.LOG_PROCESS_EVENT('File name pattern: ' || vFileBaseName || '_' || vKeyValueYear || vKeyValueMonth || vFileExtension, 'DEBUG', vParameters);

        -- Use DBMS_CLOUD package to export data to CSV file
        DBMS_CLOUD.EXPORT_DATA(
            credential_name => pCredentialName,
            file_uri_list   => vUri,
            query           => vQuery,
            format          => json_object('type' VALUE 'CSV', 'header' VALUE true)
        );
    END LOOP;

    ENV_MANAGER.LOG_PROCESS_EVENT('Export completed successfully for ' || vKeyValuesYear.COUNT || ' files', 'INFO', vParameters);
    ENV_MANAGER.LOG_PROCESS_EVENT('End','INFO',vParameters);

EXCEPTION
    WHEN ENV_MANAGER.ERR_TABLE_NOT_EXISTS THEN
        vgMsgTmp := ENV_MANAGER.MSG_TABLE_NOT_EXISTS ||': '||vTableName;
        ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_TABLE_NOT_EXISTS, vgMsgTmp);
    WHEN ENV_MANAGER.ERR_COLUMN_NOT_EXISTS THEN
        vgMsgTmp := ENV_MANAGER.MSG_COLUMN_NOT_EXISTS || ' (TableName.ColumnName): ' || vTableName||'.'||vKeyColumnName||CASE WHEN vCurrentCol IS NOT NULL THEN '.'||vCurrentCol||' in pColumnList' ELSE '' END;
        ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, vgMsgTmp);
    WHEN OTHERS THEN
        -- Log complete error details including full stack trace and backtrace
        ENV_MANAGER.LOG_PROCESS_ERROR('Export failed: ' || SQLERRM, vParameters, 'DATA_EXPORTER');
        ENV_MANAGER.LOG_PROCESS_EVENT(ENV_MANAGER.GET_ERROR_STACK(pFormat => 'TABLE', pCode=> SQLCODE), 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_UNKNOWN, ENV_MANAGER.GET_ERROR_STACK(pFormat => 'OUTPUT', pCode=> SQLCODE));

END EXPORT_TABLE_DATA_TO_CSV_BY_DATE;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
-- VERSION MANAGEMENT FUNCTIONS
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
-- Returns the current package version string (format X.Y.Z, e.g. '2.1.1').
FUNCTION GET_VERSION RETURN VARCHAR2 IS
    vVersion CONSTANT VARCHAR2(10) := PACKAGE_VERSION;
BEGIN
    RETURN vVersion;
END GET_VERSION;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
-- Returns formatted build information (version, build date, author) for this
-- package, composed by ENV_MANAGER.GET_PACKAGE_VERSION_INFO.
FUNCTION GET_BUILD_INFO RETURN VARCHAR2 IS
    vInfo VARCHAR2(32767);
BEGIN
    vInfo := ENV_MANAGER.GET_PACKAGE_VERSION_INFO(
                 pPackageName => 'DATA_EXPORTER',
                 pAuthor      => PACKAGE_AUTHOR,
                 pBuildDate   => PACKAGE_BUILD_DATE,
                 pVersion     => PACKAGE_VERSION
             );
    RETURN vInfo;
END GET_BUILD_INFO;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
-- Returns the package's recent version history, formatted by
-- ENV_MANAGER.FORMAT_VERSION_HISTORY from the VERSION_HISTORY constant.
FUNCTION GET_VERSION_HISTORY RETURN VARCHAR2 IS
    vHistory VARCHAR2(32767);
BEGIN
    vHistory := ENV_MANAGER.FORMAT_VERSION_HISTORY(
                    pVersionHistory => VERSION_HISTORY,
                    pPackageName    => 'DATA_EXPORTER'
                );
    RETURN vHistory;
END GET_VERSION_HISTORY;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
END;
|
||||
|
||||
/
|
||||
@@ -0,0 +1,165 @@
|
||||
create or replace PACKAGE CT_MRDS.DATA_EXPORTER
AUTHID CURRENT_USER
AS
/**
* Data Export Package: Provides comprehensive data export capabilities to various formats (CSV, Parquet)
* with support for cloud storage integration via Oracle Cloud Infrastructure (OCI).
* The structure of this comment is used by the GET_PACKAGE_DOCUMENTATION function,
* which returns documentation text for the Confluence page (to copy-paste it).
**/

-- Package Version Information
PACKAGE_VERSION CONSTANT VARCHAR2(10) := '2.1.1';
PACKAGE_BUILD_DATE CONSTANT VARCHAR2(19) := '2025-12-04 13:10:00';
PACKAGE_AUTHOR CONSTANT VARCHAR2(50) := 'MRDS Development Team';

-- Version History (last 3-5 changes)
VERSION_HISTORY CONSTANT VARCHAR2(4000) :=
    'v2.1.1 (2025-12-04): Fixed JOIN column reference A_WORKFLOW_HISTORY_KEY -> A_ETL_LOAD_SET_KEY, added consistent column mapping and dynamic column list to EXPORT_TABLE_DATA procedure, enhanced DEBUG logging for all export operations' || CHR(10) ||
    'v2.1.0 (2025-10-22): Added version tracking and PARTITION_YEAR/PARTITION_MONTH support' || CHR(10) ||
    'v2.0.0 (2025-10-01): Separated export functionality from FILE_MANAGER package' || CHR(10) ||
    'v1.0.0 (2025-09-15): Initial implementation within FILE_MANAGER package' || CHR(10);

-- Line break (CR+LF) for composing multi-line messages
cgBL CONSTANT VARCHAR2(2) := CHR(13)||CHR(10);
-- Scratch buffer used by exception handlers to build error messages
vgMsgTmp VARCHAR2(32000);

---------------------------------------------------------------------------------------------------------------------------
---------------------------------------------------------------------------------------------------------------------------

/**
* @name EXPORT_TABLE_DATA
* @desc Wrapper procedure for DBMS_CLOUD.EXPORT_DATA.
*       Exports data into CSV files on OCI infrastructure.
*       pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
* @example
* begin
*   DATA_EXPORTER.EXPORT_TABLE_DATA(
*     pSchemaName    => 'CT_MRDS',
*     pTableName     => 'MY_TABLE',
*     pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
*     pBucketArea    => 'DATA',
*     pFolderName    => 'csv_exports'
*   );
* end;
**/
PROCEDURE EXPORT_TABLE_DATA (
    pSchemaName IN VARCHAR2,
    pTableName IN VARCHAR2,
    pKeyColumnName IN VARCHAR2,
    pBucketArea IN VARCHAR2,
    pFolderName IN VARCHAR2,
    pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
);



/**
* @name EXPORT_TABLE_DATA_BY_DATE
* @desc Wrapper procedure for DBMS_CLOUD.EXPORT_DATA.
*       Exports data into PARQUET files on OCI infrastructure.
*       Each YEAR/MONTH pair goes to a separate file (implicit partitioning).
*       Allows specifying a custom column list; builds the full column list from
*       table metadata when pColumnList is NULL.
*       Validates that all columns in pColumnList exist in the target table.
*       Automatically adds 'T.' prefix to column names in pColumnList.
*       pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
* @example
* begin
*   DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
*     pSchemaName    => 'CT_MRDS',
*     pTableName     => 'MY_TABLE',
*     pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
*     pBucketArea    => 'DATA',
*     pFolderName    => 'parquet_exports',
*     pColumnList    => 'COLUMN1, COLUMN2, COLUMN3', -- Optional
*     pMinDate       => DATE '2024-01-01',
*     pMaxDate       => SYSDATE
*   );
* end;
**/
PROCEDURE EXPORT_TABLE_DATA_BY_DATE (
    pSchemaName IN VARCHAR2,
    pTableName IN VARCHAR2,
    pKeyColumnName IN VARCHAR2,
    pBucketArea IN VARCHAR2,
    pFolderName IN VARCHAR2,
    pColumnList IN VARCHAR2 default NULL,
    pMinDate IN DATE default DATE '1900-01-01',
    pMaxDate IN DATE default SYSDATE,
    pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
);



/**
* @name EXPORT_TABLE_DATA_TO_CSV_BY_DATE
* @desc Exports data to separate CSV files partitioned by year and month.
*       Creates one CSV file for each year/month combination found in the data.
*       Uses the same date filtering mechanism with CT_ODS.A_LOAD_HISTORY as EXPORT_TABLE_DATA_BY_DATE,
*       but exports to CSV format instead of Parquet.
*       File naming pattern: {pFileName}_YYYYMM.csv or {TABLENAME}_YYYYMM.csv (if pFileName is NULL)
*       pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
* @example
* begin
*   -- With custom filename
*   DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
*     pSchemaName    => 'CT_MRDS',
*     pTableName     => 'MY_TABLE',
*     pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
*     pBucketArea    => 'DATA',
*     pFolderName    => 'exports',
*     pFileName      => 'my_export.csv',
*     pMinDate       => DATE '2024-01-01',
*     pMaxDate       => SYSDATE
*   );
*
*   -- With auto-generated filename (based on table name only)
*   DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
*     pSchemaName    => 'OU_TOP',
*     pTableName     => 'AGGREGATED_ALLOTMENT',
*     pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
*     pBucketArea    => 'ARCHIVE',
*     pFolderName    => 'exports',
*     pMinDate       => DATE '2025-09-01',
*     pMaxDate       => DATE '2025-09-17'
*   );
*   -- This will create files like: AGGREGATED_ALLOTMENT_202509.csv, etc.
* end;
**/
PROCEDURE EXPORT_TABLE_DATA_TO_CSV_BY_DATE (
    pSchemaName IN VARCHAR2,
    pTableName IN VARCHAR2,
    pKeyColumnName IN VARCHAR2,
    pBucketArea IN VARCHAR2,
    pFolderName IN VARCHAR2,
    pFileName IN VARCHAR2 DEFAULT NULL,
    pColumnList IN VARCHAR2 default NULL,
    pMinDate IN DATE default DATE '1900-01-01',
    pMaxDate IN DATE default SYSDATE,
    pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
);

---------------------------------------------------------------------------------------------------------------------------
-- VERSION MANAGEMENT FUNCTIONS
---------------------------------------------------------------------------------------------------------------------------

/**
* Returns the current package version number
* return: Version string in format X.Y.Z (e.g., '2.1.1')
**/
FUNCTION GET_VERSION RETURN VARCHAR2;

/**
* Returns comprehensive build information including version, date, and author
* return: Formatted string with complete build details
**/
FUNCTION GET_BUILD_INFO RETURN VARCHAR2;

/**
* Returns the version history with recent changes
* return: Multi-line string with version history
**/
FUNCTION GET_VERSION_HISTORY RETURN VARCHAR2;

END;

/
|
||||
@@ -0,0 +1,730 @@
|
||||
create or replace PACKAGE BODY CT_MRDS.DATA_EXPORTER
|
||||
AS
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
-- PRIVATE HELPER FUNCTIONS (USED BY MULTIPLE PROCEDURES)
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
/**
 * Replaces every character that is not alphanumeric, dot, underscore or hyphen
 * with an underscore so the result is safe to use as an object-storage file name.
 **/
FUNCTION sanitizeFilename(pFilename IN VARCHAR2) RETURN VARCHAR2 IS
BEGIN
    -- Whitelist approach: keep [a-zA-Z0-9._-], rewrite everything else to '_'.
    RETURN REGEXP_REPLACE(pFilename, '[^a-zA-Z0-9._-]', '_');
END sanitizeFilename;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
/**
 * Builds the SELECT column list for export queries: every column receives the
 * "T." table alias, and the load-set key column is exposed under the alias
 * A_WORKFLOW_HISTORY_KEY. When pColumnList is NULL the complete column list of
 * the table is read from the data dictionary; otherwise the caller-supplied
 * comma-separated list is used (spaces removed, uppercased).
 *
 * Fix: the previous dictionary-driven branch aliased the key column with a plain
 * substring REPLACE, which also rewrote any column whose name merely STARTS WITH
 * the key column name (e.g. key A_KEY corrupting A_KEY_FK). Both branches now
 * share a single exact-name, token-by-token parser.
 **/
FUNCTION processColumnList(pColumnList IN VARCHAR2, pTableName IN VARCHAR2, pSchemaName IN VARCHAR2, pKeyColumnName IN VARCHAR2) RETURN VARCHAR2 IS
    vResult     VARCHAR2(32767);
    vColumns    VARCHAR2(32767);
    vPos        PLS_INTEGER;
    vNextPos    PLS_INTEGER;
    vCurrentCol VARCHAR2(128);
BEGIN
    IF pColumnList IS NULL THEN
        -- Dictionary-driven list: column names are already uppercase and space-free.
        SELECT LISTAGG(column_name, ',') WITHIN GROUP (ORDER BY column_id)
          INTO vColumns
          FROM all_tab_columns
         WHERE table_name = pTableName
           AND owner      = pSchemaName;
    ELSE
        -- Caller-supplied list: remove spaces and normalize to uppercase.
        vColumns := UPPER(REPLACE(pColumnList, ' ', ''));
    END IF;

    vPos    := 1;
    vResult := '';

    -- Parse the comma-separated list and decorate each token.
    WHILE vPos <= LENGTH(vColumns) LOOP
        vNextPos := INSTR(vColumns, ',', vPos);
        IF vNextPos = 0 THEN
            vNextPos := LENGTH(vColumns) + 1;
        END IF;

        vCurrentCol := SUBSTR(vColumns, vPos, vNextPos - vPos);

        -- Exact-name comparison: only the key column itself gets the alias.
        IF UPPER(vCurrentCol) = UPPER(pKeyColumnName) THEN
            vCurrentCol := 'T.' || pKeyColumnName || ' AS A_WORKFLOW_HISTORY_KEY';
        ELSIF UPPER(vCurrentCol) = 'A_ETL_LOAD_SET_KEY' THEN
            vCurrentCol := 'T.A_ETL_LOAD_SET_KEY AS A_WORKFLOW_HISTORY_KEY';
        ELSE
            -- Add the T. prefix unless the token is already qualified.
            IF INSTR(vCurrentCol, '.') = 0 THEN
                vCurrentCol := 'T.' || vCurrentCol;
            END IF;
        END IF;

        -- Append with comma separator.
        IF vResult IS NOT NULL THEN
            vResult := vResult || ', ';
        END IF;
        vResult := vResult || vCurrentCol;

        vPos := vNextPos + 1;
    END LOOP;

    RETURN vResult;
END processColumnList;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
/**
 * Validates that the target table exists, that the key column exists on it, and
 * that every entry of an optional explicit column list is a real column of the
 * table. Raises the corresponding ENV_MANAGER error code on the first failure.
 **/
PROCEDURE VALIDATE_TABLE_AND_COLUMNS (
    pSchemaName     IN VARCHAR2,
    pTableName      IN VARCHAR2,
    pKeyColumnName  IN VARCHAR2,
    pColumnList     IN VARCHAR2,
    pParameters     IN VARCHAR2
) IS
    vHits   INTEGER;
    vList   VARCHAR2(32767);
    vScan   PLS_INTEGER;
    vComma  PLS_INTEGER;
    vToken  VARCHAR2(128);
BEGIN
    -- The table itself must be visible in the data dictionary.
    SELECT COUNT(*)
      INTO vHits
      FROM all_tables
     WHERE table_name = pTableName
       AND owner      = pSchemaName;

    IF vHits = 0 THEN
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_TABLE_NOT_EXISTS, ENV_MANAGER.MSG_TABLE_NOT_EXISTS);
    END IF;

    -- The join key column must exist on that table.
    SELECT COUNT(*)
      INTO vHits
      FROM all_tab_columns
     WHERE table_name  = pTableName
       AND column_name = pKeyColumnName
       AND owner       = pSchemaName;

    IF vHits = 0 THEN
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, ENV_MANAGER.MSG_COLUMN_NOT_EXISTS);
    END IF;

    -- No explicit column list: nothing more to validate.
    IF pColumnList IS NULL THEN
        RETURN;
    END IF;

    -- Normalize and walk the comma-separated list, checking each entry.
    vList := UPPER(REPLACE(pColumnList, ' ', ''));
    vScan := 1;

    WHILE vScan <= LENGTH(vList) LOOP
        vComma := INSTR(vList, ',', vScan);
        IF vComma = 0 THEN
            vComma := LENGTH(vList) + 1;
        END IF;

        vToken := SUBSTR(vList, vScan, vComma - vScan);

        -- Strip an optional "ALIAS." qualifier before the dictionary lookup.
        IF INSTR(vToken, '.') > 0 THEN
            vToken := SUBSTR(vToken, INSTR(vToken, '.') + 1);
        END IF;

        SELECT COUNT(*)
          INTO vHits
          FROM all_tab_columns
         WHERE table_name  = pTableName
           AND column_name = vToken
           AND owner       = pSchemaName;

        IF vHits = 0 THEN
            RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, ENV_MANAGER.MSG_COLUMN_NOT_EXISTS);
        END IF;

        vScan := vComma + 1;
    END LOOP;
END VALIDATE_TABLE_AND_COLUMNS;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
/**
 * Determines the distinct year/month combinations (taken from
 * CT_ODS.A_LOAD_HISTORY.LOAD_START) covered by [pMinDate, pMaxDate) for the
 * given table, joined via the key column. Returns them in ascending order
 * as a partition_tab collection.
 **/
FUNCTION GET_PARTITIONS (
    pSchemaName     IN VARCHAR2,
    pTableName      IN VARCHAR2,
    pKeyColumnName  IN VARCHAR2,
    pMinDate        IN DATE,
    pMaxDate        IN DATE,
    pParameters     IN VARCHAR2
) RETURN partition_tab IS
    vStmt    VARCHAR2(32000);
    vYears   DBMS_SQL.VARCHAR2_TABLE;
    vMonths  DBMS_SQL.VARCHAR2_TABLE;
    vTarget  VARCHAR2(200);
    vOut     partition_tab := partition_tab();
BEGIN
    -- Qualify the table with its schema unless the caller already did.
    vTarget := CASE
                   WHEN INSTR(pTableName, '.') > 0 THEN pTableName
                   ELSE pSchemaName || '.' || pTableName
               END;

    -- Date bounds are bound variables; identifiers are validated by the callers.
    vStmt := 'SELECT DISTINCT TO_CHAR(L.LOAD_START,''YYYY'') AS YR, TO_CHAR(L.LOAD_START,''MM'') AS MN
FROM ' || vTarget || ' T, CT_ODS.A_LOAD_HISTORY L
WHERE T.' || pKeyColumnName || ' = L.A_ETL_LOAD_SET_KEY
AND L.LOAD_START >= :pMinDate
AND L.LOAD_START < :pMaxDate
ORDER BY YR, MN';

    ENV_MANAGER.LOG_PROCESS_EVENT('Executing date range query: ' || vStmt, 'DEBUG', pParameters);
    EXECUTE IMMEDIATE vStmt BULK COLLECT INTO vYears, vMonths USING pMinDate, pMaxDate;
    ENV_MANAGER.LOG_PROCESS_EVENT('Found ' || vYears.COUNT || ' year/month combinations to export', 'DEBUG', pParameters);

    -- Repack the parallel year/month arrays into the record collection.
    vOut.EXTEND(vYears.COUNT);
    FOR i IN 1 .. vYears.COUNT LOOP
        vOut(i).year  := vYears(i);
        vOut(i).month := vMonths(i);
    END LOOP;

    RETURN vOut;
END GET_PARTITIONS;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
/**
 * Exports a single year/month slice of the source table to object storage.
 * Core worker procedure (intended for parallel execution in v2.3.0).
 *
 * pFormat = 'PARQUET': Hive-style layout PARTITION_YEAR=.../PARTITION_MONTH=.../YYYYMM.parquet
 * pFormat = 'CSV'    : flat file named <base>_<YYYY><MM>.csv
 *                      (extension is hard-coded here; see pFileBaseName)
 *
 * Fixes vs previous version:
 *  - unsupported pFormat is rejected BEFORE building the query;
 *  - work item and query are logged BEFORE DBMS_CLOUD.EXPORT_DATA, so a failed
 *    export still leaves a trace of what was attempted;
 *  - a single EXPORT_DATA call serves both formats (no duplicated call sites);
 *  - vFullTableName widened: "schema.table" can exceed 200 characters.
 **/
PROCEDURE EXPORT_SINGLE_PARTITION (
    pSchemaName        IN VARCHAR2,
    pTableName         IN VARCHAR2,
    pKeyColumnName     IN VARCHAR2,
    pYear              IN VARCHAR2,
    pMonth             IN VARCHAR2,
    pBucketUri         IN VARCHAR2,
    pFolderName        IN VARCHAR2,
    pProcessedColumns  IN VARCHAR2,
    pMinDate           IN DATE,
    pMaxDate           IN DATE,
    pCredentialName    IN VARCHAR2,
    pFormat            IN VARCHAR2 DEFAULT 'PARQUET',
    pFileBaseName      IN VARCHAR2 DEFAULT NULL,
    pParameters        IN VARCHAR2
) IS
    vQuery         VARCHAR2(32767);
    vUri           VARCHAR2(4000);
    vFileName      VARCHAR2(1000);
    vFullTableName VARCHAR2(261);  -- 128 (schema) + '.' + 128 (table), with margin
BEGIN
    -- Fail fast on an unsupported format, before any work is done.
    IF pFormat NOT IN ('PARQUET', 'CSV') THEN
        RAISE_APPLICATION_ERROR(-20001, 'Unsupported format: ' || pFormat || '. Use PARQUET or CSV.');
    END IF;

    -- Build fully qualified table name if not already qualified.
    IF INSTR(pTableName, '.') > 0 THEN
        vFullTableName := pTableName;  -- already schema-qualified by the caller
    ELSE
        vFullTableName := pSchemaName || '.' || pTableName;
    END IF;

    -- Extraction query for the current year/month slice; date bounds are rendered
    -- as literals because DBMS_CLOUD.EXPORT_DATA takes a complete query text.
    vQuery := 'SELECT ' || pProcessedColumns || '
FROM ' || vFullTableName || ' T, CT_ODS.A_LOAD_HISTORY L
WHERE T.' || pKeyColumnName || ' = L.A_ETL_LOAD_SET_KEY
AND TO_CHAR(L.LOAD_START,''YYYY'') = ' || CHR(39) || pYear || CHR(39) || '
AND TO_CHAR(L.LOAD_START,''MM'') = ' || CHR(39) || pMonth || CHR(39) || '
AND L.LOAD_START >= TO_DATE(' || CHR(39) || TO_CHAR(pMinDate, 'YYYY-MM-DD HH24:MI:SS') || CHR(39) || ', ''YYYY-MM-DD HH24:MI:SS'')
AND L.LOAD_START < TO_DATE(' || CHR(39) || TO_CHAR(pMaxDate, 'YYYY-MM-DD HH24:MI:SS') || CHR(39) || ', ''YYYY-MM-DD HH24:MI:SS'')';

    -- Construct the target URI for the chosen format.
    IF pFormat = 'PARQUET' THEN
        -- Parquet: Hive-style partitioning in the object path.
        vUri := pBucketUri ||
                CASE WHEN pFolderName IS NOT NULL THEN pFolderName || '/' ELSE '' END ||
                'PARTITION_YEAR=' || sanitizeFilename(pYear) || '/' ||
                'PARTITION_MONTH=' || sanitizeFilename(pMonth) || '/' ||
                sanitizeFilename(pYear) || sanitizeFilename(pMonth) || '.parquet';
        ENV_MANAGER.LOG_PROCESS_EVENT('Parquet export URI: ' || vUri, 'DEBUG', pParameters);
    ELSE
        -- CSV: flat file with year/month in the filename. NOTE(review): the '.csv'
        -- extension is hard-coded here even when callers parsed a different
        -- extension from pFileName — confirm whether that is intended.
        vFileName := NVL(pFileBaseName, UPPER(pTableName)) || '_' || pYear || pMonth || '.csv';
        vUri := pBucketUri ||
                CASE WHEN pFolderName IS NOT NULL THEN pFolderName || '/' ELSE '' END ||
                sanitizeFilename(vFileName);
        ENV_MANAGER.LOG_PROCESS_EVENT('CSV export URI: ' || vUri, 'DEBUG', pParameters);
    END IF;

    -- Log the work item BEFORE exporting so failures remain traceable.
    ENV_MANAGER.LOG_PROCESS_EVENT('Processing Year/Month: ' || pYear || '/' || pMonth || ' (Format: ' || pFormat || ')', 'DEBUG', pParameters);
    ENV_MANAGER.LOG_PROCESS_EVENT('Export query: ' || vQuery, 'DEBUG', pParameters);

    DBMS_CLOUD.EXPORT_DATA(
        credential_name => pCredentialName,
        file_uri_list   => vUri,
        query           => vQuery,
        format          => CASE WHEN pFormat = 'PARQUET'
                                THEN json_object('type' VALUE 'parquet')
                                ELSE json_object('type' VALUE 'CSV', 'header' VALUE true)
                           END
    );
END EXPORT_SINGLE_PARTITION;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
-- MAIN EXPORT PROCEDURES
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
/**
 * Exports the table's rows to object storage as one CSV file per distinct
 * A_ETL_LOAD_SET_KEY value found by joining the table (via pKeyColumnName)
 * to CT_ODS.A_LOAD_HISTORY. Each file is named <key value>.csv.
 *
 * Changes vs previous version:
 *  - table and key-column existence checks now use the shared
 *    VALIDATE_TABLE_AND_COLUMNS (consistent with the *_BY_DATE procedures,
 *    same error codes raised);
 *  - vTableName widened to hold "schema.table" after qualification;
 *  - removed the then-unused vCount/vAllCols-era locals.
 *
 * NOTE(review): the SELECT list deliberately excludes the key column before
 * processColumnList is applied, so no A_WORKFLOW_HISTORY_KEY alias appears in
 * the output of this procedure — confirm whether that matches the intent of
 * the v2.1.1 "consistent column mapping" change.
 **/
PROCEDURE EXPORT_TABLE_DATA (
    pSchemaName      IN VARCHAR2,
    pTableName       IN VARCHAR2,
    pKeyColumnName   IN VARCHAR2,
    pBucketArea      IN VARCHAR2,
    pFolderName      IN VARCHAR2,
    pCredentialName  IN VARCHAR2 default ENV_MANAGER.gvCredentialName
)
IS
    -- Collection type for the distinct key values driving the per-file loop.
    TYPE key_value_tab IS TABLE OF VARCHAR2(4000);
    vKeyValues           key_value_tab;
    vSql                 VARCHAR2(4000);
    vKeyValue            VARCHAR2(4000);
    vQuery               VARCHAR2(32767);
    vUri                 VARCHAR2(4000);
    vDataType            VARCHAR2(30);
    vTableName           VARCHAR2(261);  -- holds "schema.table" after qualification
    vSchemaName          VARCHAR2(128);
    vKeyColumnName       VARCHAR2(128);
    vParameters          VARCHAR2(4000);
    vBucketUri           VARCHAR2(4000);
    vProcessedColumnList VARCHAR2(32767);
    vCurrentCol          VARCHAR2(128);  -- referenced by the column-error handler
    vAllColumnsList      VARCHAR2(32767);
BEGIN
    vParameters := ENV_MANAGER.FORMAT_PARAMETERS(SYS.ODCIVARCHAR2LIST( 'pSchemaName => '''||nvl(pSchemaName, 'NULL')||''''
                                                                      ,'pTableName => '''||nvl(pTableName, 'NULL')||''''
                                                                      ,'pKeyColumnName => '''||nvl(pKeyColumnName, 'NULL')||''''
                                                                      ,'pBucketArea => '''||nvl(pBucketArea, 'NULL')||''''
                                                                      ,'pFolderName => '''||nvl(pFolderName, 'NULL')||''''
                                                                      ,'pCredentialName => '''||nvl(pCredentialName, 'NULL')||''''
                                                                      ));
    ENV_MANAGER.LOG_PROCESS_EVENT('Start','INFO', vParameters);

    -- Resolve the object-storage bucket URI for the requested area.
    vBucketUri := FILE_MANAGER.GET_BUCKET_URI(pBucketArea);

    -- Uppercase identifiers to match the data dictionary.
    vTableName     := UPPER(pTableName);
    vSchemaName    := UPPER(pSchemaName);
    vKeyColumnName := UPPER(pKeyColumnName);

    -- Shared validation: table exists, key column exists (no explicit column list here).
    VALIDATE_TABLE_AND_COLUMNS(vSchemaName, vTableName, vKeyColumnName, NULL, vParameters);

    -- The key column's data type drives how its literal is rendered below.
    SELECT data_type INTO vDataType
      FROM all_tab_columns
     WHERE table_name  = vTableName
       AND column_name = vKeyColumnName
       AND owner       = vSchemaName;

    -- Build list of all columns (excluding the key column to avoid duplication).
    SELECT LISTAGG(column_name, ', ') WITHIN GROUP (ORDER BY column_id)
      INTO vAllColumnsList
      FROM all_tab_columns
     WHERE table_name = vTableName
       AND owner      = vSchemaName
       AND column_name != vKeyColumnName;

    -- Process column list to add the T. prefix to each column.
    vProcessedColumnList := processColumnList(vAllColumnsList, vTableName, vSchemaName, vKeyColumnName);

    ENV_MANAGER.LOG_PROCESS_EVENT('Dynamic column list built (excluding key): ' || vAllColumnsList, 'DEBUG', vParameters);
    ENV_MANAGER.LOG_PROCESS_EVENT('Processed column list with T. prefix: ' || vProcessedColumnList, 'DEBUG', vParameters);

    -- Safe qualification via DBMS_ASSERT before use in dynamic SQL.
    vTableName := DBMS_ASSERT.SCHEMA_NAME(vSchemaName) || '.' || DBMS_ASSERT.simple_sql_name(vTableName);

    -- Fetch unique key values from A_LOAD_HISTORY.
    vSql := 'SELECT DISTINCT L.A_ETL_LOAD_SET_KEY' ||
            ' FROM ' || vTableName || ' T, CT_ODS.A_LOAD_HISTORY L' ||
            ' WHERE T.' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = L.A_ETL_LOAD_SET_KEY';

    ENV_MANAGER.LOG_PROCESS_EVENT('Executing key values query: ' || vSql, 'DEBUG', vParameters);
    EXECUTE IMMEDIATE vSql BULK COLLECT INTO vKeyValues;
    ENV_MANAGER.LOG_PROCESS_EVENT('Found ' || vKeyValues.COUNT || ' unique key values to process', 'DEBUG', vParameters);

    -- One exported CSV file per distinct key value.
    FOR i IN 1 .. vKeyValues.COUNT LOOP
        vKeyValue := vKeyValues(i);

        -- Render the key literal according to the key column's data type.
        IF vDataType IN ('VARCHAR2', 'CHAR', 'NCHAR', 'NVARCHAR2') THEN
            vQuery := 'SELECT ' || vProcessedColumnList ||
                      ' FROM ' || vTableName || ' T, CT_ODS.A_LOAD_HISTORY L' ||
                      ' WHERE T.' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = L.A_ETL_LOAD_SET_KEY' ||
                      ' AND L.A_ETL_LOAD_SET_KEY = ' || CHR(39) || vKeyValue || CHR(39);
        ELSIF vDataType IN ('NUMBER', 'FLOAT', 'BINARY_FLOAT', 'BINARY_DOUBLE') THEN
            vQuery := 'SELECT ' || vProcessedColumnList ||
                      ' FROM ' || vTableName || ' T, CT_ODS.A_LOAD_HISTORY L' ||
                      ' WHERE T.' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = L.A_ETL_LOAD_SET_KEY' ||
                      ' AND L.A_ETL_LOAD_SET_KEY = ' || vKeyValue;
        ELSIF vDataType LIKE 'TIMESTAMP%' OR vDataType = 'DATE' THEN
            vQuery := 'SELECT ' || vProcessedColumnList ||
                      ' FROM ' || vTableName || ' T, CT_ODS.A_LOAD_HISTORY L' ||
                      ' WHERE T.' || DBMS_ASSERT.simple_sql_name(vKeyColumnName) || ' = L.A_ETL_LOAD_SET_KEY' ||
                      ' AND L.A_ETL_LOAD_SET_KEY = TO_TIMESTAMP(' || CHR(39) || vKeyValue || CHR(39) ||', ''YYYY-MM-DD HH24:MI:SS.FF'')';
        ELSE
            RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_UNSUPPORTED_DATA_TYPE, ENV_MANAGER.MSG_UNSUPPORTED_DATA_TYPE);
        END IF;

        -- Construct the URI for the file in OCI Object Storage.
        vUri := vBucketUri ||
                CASE WHEN pFolderName IS NOT NULL THEN pFolderName || '/' ELSE '' END ||
                sanitizeFilename(vKeyValue) || '.csv';

        ENV_MANAGER.LOG_PROCESS_EVENT('Processing key value: ' || vKeyValue || ' (' || (i) || '/' || vKeyValues.COUNT || ')', 'DEBUG', vParameters);
        ENV_MANAGER.LOG_PROCESS_EVENT('Export query: ' || vQuery, 'DEBUG', vParameters);
        ENV_MANAGER.LOG_PROCESS_EVENT('Export URI: ' || vUri, 'DEBUG', vParameters);

        -- Use DBMS_CLOUD package to export data to the URI.
        DBMS_CLOUD.EXPORT_DATA(
            credential_name => pCredentialName,
            file_uri_list   => vUri,
            query           => vQuery,
            format          => json_object('type' VALUE 'CSV', 'header' VALUE true)
        );
    END LOOP;
    ENV_MANAGER.LOG_PROCESS_EVENT('End','INFO',vParameters);
EXCEPTION
    WHEN ENV_MANAGER.ERR_TABLE_NOT_EXISTS THEN
        vgMsgTmp := ENV_MANAGER.MSG_TABLE_NOT_EXISTS ||': '||vTableName;
        ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_TABLE_NOT_EXISTS, vgMsgTmp);
    WHEN ENV_MANAGER.ERR_COLUMN_NOT_EXISTS THEN
        vgMsgTmp := ENV_MANAGER.MSG_COLUMN_NOT_EXISTS || ' (TableName.ColumnName): ' || vTableName||'.'||vKeyColumnName||CASE WHEN vCurrentCol IS NOT NULL THEN '.'||vCurrentCol||' in column list' ELSE '' END;
        ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, vgMsgTmp);
    WHEN ENV_MANAGER.ERR_UNSUPPORTED_DATA_TYPE THEN
        vgMsgTmp := ENV_MANAGER.MSG_UNSUPPORTED_DATA_TYPE || ' vDataType: '||vDataType;
        ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_UNSUPPORTED_DATA_TYPE, vgMsgTmp);
    WHEN OTHERS THEN
        -- Log complete error details including full stack trace and backtrace.
        ENV_MANAGER.LOG_PROCESS_ERROR('Export failed: ' || SQLERRM, vParameters, 'DATA_EXPORTER');
        ENV_MANAGER.LOG_PROCESS_EVENT(ENV_MANAGER.GET_ERROR_STACK(pFormat => 'TABLE', pCode=> SQLCODE), 'ERROR', vParameters);
        RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_UNKNOWN, ENV_MANAGER.GET_ERROR_STACK(pFormat => 'OUTPUT', pCode=> SQLCODE));

END EXPORT_TABLE_DATA;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
/**
 * Exports table rows as Parquet files to object storage, one file per distinct
 * year/month of CT_ODS.A_LOAD_HISTORY.LOAD_START within [pMinDate, pMaxDate),
 * using Hive-style PARTITION_YEAR=/PARTITION_MONTH= object paths.
 * Validates inputs, processes the column list (T. prefix + key alias), then
 * delegates each partition to EXPORT_SINGLE_PARTITION with pFormat => 'PARQUET'.
 **/
PROCEDURE EXPORT_TABLE_DATA_BY_DATE (
pSchemaName IN VARCHAR2,
pTableName IN VARCHAR2,
pKeyColumnName IN VARCHAR2,
pBucketArea IN VARCHAR2,
pFolderName IN VARCHAR2,
pColumnList IN VARCHAR2 default NULL,
pMinDate IN DATE default DATE '1900-01-01',
pMaxDate IN DATE default SYSDATE,
pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
)
IS
vTableName VARCHAR2(128);
vSchemaName VARCHAR2(128);
vKeyColumnName VARCHAR2(128);
vParameters CT_MRDS.A_PROCESS_LOG.PROCEDURE_PARAMETERS%TYPE;
vProcessedColumnList VARCHAR2(32767);
vBucketUri VARCHAR2(4000);
-- NOTE(review): vCurrentCol is never assigned in this procedure, so the
-- column-detail suffix in the ERR_COLUMN_NOT_EXISTS handler below is always
-- empty — confirm whether it was meant to be populated during validation.
vCurrentCol VARCHAR2(128);
vPartitions partition_tab;

BEGIN
-- Capture the call parameters once, for every subsequent log entry.
vParameters := ENV_MANAGER.FORMAT_PARAMETERS(SYS.ODCIVARCHAR2LIST( 'pSchemaName => '''||nvl(pSchemaName, 'NULL')||''''
,'pTableName => '''||nvl(pTableName, 'NULL')||''''
,'pKeyColumnName => '''||nvl(pKeyColumnName, 'NULL')||''''
,'pBucketArea => '''||nvl(pBucketArea, 'NULL')||''''
,'pFolderName => '''||nvl(pFolderName, 'NULL')||''''
,'pColumnList => '''||nvl(pColumnList, 'NULL')||''''
,'pMinDate => '''||nvl(TO_CHAR(pMinDate, 'YYYY-MM-DD HH24:MI:SS'), 'NULL')||''''
,'pMaxDate => '''||nvl(TO_CHAR(pMaxDate, 'YYYY-MM-DD HH24:MI:SS'), 'NULL')||''''
,'pCredentialName => '''||nvl(pCredentialName, 'NULL')||''''
));
ENV_MANAGER.LOG_PROCESS_EVENT('Start','INFO', vParameters);

-- Get bucket URI based on bucket area using FILE_MANAGER function
vBucketUri := FILE_MANAGER.GET_BUCKET_URI(pBucketArea);

-- Convert table and column names to uppercase to match data dictionary
vTableName := UPPER(pTableName);
vSchemaName := UPPER(pSchemaName);
vKeyColumnName := UPPER(pKeyColumnName);

-- Validate table, key column, and column list using shared procedure
VALIDATE_TABLE_AND_COLUMNS(vSchemaName, vTableName, vKeyColumnName, pColumnList, vParameters);

-- Process column list to add T. prefix to each column
vProcessedColumnList := processColumnList(pColumnList, vTableName, vSchemaName, vKeyColumnName);

ENV_MANAGER.LOG_PROCESS_EVENT('Input column list: ' || NVL(pColumnList, 'NULL (building dynamic list from table metadata)'), 'DEBUG', vParameters);
ENV_MANAGER.LOG_PROCESS_EVENT('Processed column list: ' || vProcessedColumnList, 'DEBUG', vParameters);

-- Safe schema.table qualification via DBMS_ASSERT before any dynamic SQL use.
vTableName := DBMS_ASSERT.SCHEMA_NAME(vSchemaName) || '.' || DBMS_ASSERT.simple_sql_name(vTableName);

-- Get partitions using shared function
vPartitions := GET_PARTITIONS(vSchemaName, vTableName, vKeyColumnName, pMinDate, pMaxDate, vParameters);

-- Loop over each partition and export using shared worker procedure
FOR i IN 1 .. vPartitions.COUNT LOOP
EXPORT_SINGLE_PARTITION(
pSchemaName => vSchemaName,
pTableName => vTableName,
pKeyColumnName => vKeyColumnName,
pYear => vPartitions(i).year,
pMonth => vPartitions(i).month,
pBucketUri => vBucketUri,
pFolderName => pFolderName,
pProcessedColumns => vProcessedColumnList,
pMinDate => pMinDate,
pMaxDate => pMaxDate,
pCredentialName => pCredentialName,
pFormat => 'PARQUET',
pFileBaseName => NULL,
pParameters => vParameters
);
END LOOP;

ENV_MANAGER.LOG_PROCESS_EVENT('End','INFO',vParameters);
EXCEPTION
-- Translate known validation failures into logged, coded application errors.
WHEN ENV_MANAGER.ERR_TABLE_NOT_EXISTS THEN
vgMsgTmp := ENV_MANAGER.MSG_TABLE_NOT_EXISTS ||': '||vTableName;
ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_TABLE_NOT_EXISTS, vgMsgTmp);
WHEN ENV_MANAGER.ERR_COLUMN_NOT_EXISTS THEN
vgMsgTmp := ENV_MANAGER.MSG_COLUMN_NOT_EXISTS || ' (TableName.ColumnName): ' || vTableName||'.'||vKeyColumnName||CASE WHEN vCurrentCol IS NOT NULL THEN '.'||vCurrentCol||' in pColumnList' ELSE '' END;
ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, vgMsgTmp);
WHEN OTHERS THEN
-- Log complete error details including full stack trace and backtrace
ENV_MANAGER.LOG_PROCESS_ERROR('Export failed: ' || SQLERRM, vParameters, 'DATA_EXPORTER');
ENV_MANAGER.LOG_PROCESS_EVENT(ENV_MANAGER.GET_ERROR_STACK(pFormat => 'TABLE', pCode=> SQLCODE), 'ERROR', vParameters);
RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_UNKNOWN, ENV_MANAGER.GET_ERROR_STACK(pFormat => 'OUTPUT', pCode=> SQLCODE));

END EXPORT_TABLE_DATA_BY_DATE;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
/**
 * @name EXPORT_TABLE_DATA_TO_CSV_BY_DATE
 * @desc Exports data to CSV files with date filtering. Like
 *       EXPORT_TABLE_DATA_BY_DATE it produces one file per year/month
 *       combination (via EXPORT_SINGLE_PARTITION), but in flat CSV form named
 *       <base>_<YYYY><MM>.csv instead of Hive-partitioned Parquet.
 *       Uses the same date filtering mechanism with CT_ODS.A_LOAD_HISTORY.
 *       Allows specifying a custom column list; when pColumnList is NULL the
 *       full column list is derived from the data dictionary.
 *       Validates that all columns in pColumnList exist in the target table.
 *       Automatically adds the 'T.' prefix to column names in pColumnList.
 * @example
 * begin
 *   DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
 *     pSchemaName    => 'CT_MRDS',
 *     pTableName     => 'MY_TABLE',
 *     pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
 *     pBucketArea    => 'DATA',
 *     pFolderName    => 'exports',
 *     pFileName      => 'my_export.csv',
 *     pColumnList    => 'COLUMN1, COLUMN2, COLUMN3', -- Optional
 *     pMinDate       => DATE '2024-01-01',
 *     pMaxDate       => SYSDATE
 *   );
 * end;
 **/
PROCEDURE EXPORT_TABLE_DATA_TO_CSV_BY_DATE (
pSchemaName IN VARCHAR2,
pTableName IN VARCHAR2,
pKeyColumnName IN VARCHAR2,
pBucketArea IN VARCHAR2,
pFolderName IN VARCHAR2,
pFileName IN VARCHAR2 DEFAULT NULL,
pColumnList IN VARCHAR2 default NULL,
pMinDate IN DATE default DATE '1900-01-01',
pMaxDate IN DATE default SYSDATE,
pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
)
IS
vTableName VARCHAR2(128);
vSchemaName VARCHAR2(128);
vKeyColumnName VARCHAR2(128);
vParameters CT_MRDS.A_PROCESS_LOG.PROCEDURE_PARAMETERS%TYPE;
vFileBaseName VARCHAR2(4000);
-- NOTE(review): vFileExtension is parsed below but never passed on — the
-- worker EXPORT_SINGLE_PARTITION hard-codes '.csv'. Confirm whether a
-- caller-supplied extension (e.g. 'export.txt') should be honored.
vFileExtension VARCHAR2(10);
vProcessedColumnList VARCHAR2(32767);
vBucketUri VARCHAR2(4000);
-- NOTE(review): vCurrentCol is never assigned here; the column-detail suffix
-- in the ERR_COLUMN_NOT_EXISTS handler is therefore always empty.
vCurrentCol VARCHAR2(128);
vPartitions partition_tab;

BEGIN
-- Capture the call parameters once, for every subsequent log entry.
vParameters := ENV_MANAGER.FORMAT_PARAMETERS(SYS.ODCIVARCHAR2LIST( 'pSchemaName => '''||nvl(pSchemaName, 'NULL')||''''
,'pTableName => '''||nvl(pTableName, 'NULL')||''''
,'pKeyColumnName => '''||nvl(pKeyColumnName, 'NULL')||''''
,'pBucketArea => '''||nvl(pBucketArea, 'NULL')||''''
,'pFolderName => '''||nvl(pFolderName, 'NULL')||''''
,'pFileName => '''||nvl(pFileName, 'NULL')||''''
,'pColumnList => '''||nvl(pColumnList, 'NULL')||''''
,'pMinDate => '''||nvl(TO_CHAR(pMinDate, 'YYYY-MM-DD HH24:MI:SS'), 'NULL')||''''
,'pMaxDate => '''||nvl(TO_CHAR(pMaxDate, 'YYYY-MM-DD HH24:MI:SS'), 'NULL')||''''
,'pCredentialName => '''||nvl(pCredentialName, 'NULL')||''''
));
ENV_MANAGER.LOG_PROCESS_EVENT('Start','INFO', vParameters);

-- Get bucket URI based on bucket area using FILE_MANAGER function
vBucketUri := FILE_MANAGER.GET_BUCKET_URI(pBucketArea);

-- Convert table and column names to uppercase to match data dictionary
vTableName := UPPER(pTableName);
vSchemaName := UPPER(pSchemaName);
vKeyColumnName := UPPER(pKeyColumnName);

-- Extract base filename and extension or construct default filename
IF pFileName IS NOT NULL THEN
-- Use provided filename; split on the LAST dot so 'a.b.csv' keeps base 'a.b'.
IF INSTR(pFileName, '.') > 0 THEN
vFileBaseName := SUBSTR(pFileName, 1, INSTR(pFileName, '.', -1) - 1);
vFileExtension := SUBSTR(pFileName, INSTR(pFileName, '.', -1));
ELSE
vFileBaseName := pFileName;
vFileExtension := '.csv';
END IF;
ELSE
-- Construct default filename: TABLENAME (without extension, will be added by worker)
vFileBaseName := UPPER(pTableName);
vFileExtension := '.csv';
END IF;

-- Validate table, key column, and column list using shared procedure
VALIDATE_TABLE_AND_COLUMNS(vSchemaName, vTableName, vKeyColumnName, pColumnList, vParameters);

-- Process column list to add T. prefix to each column
vProcessedColumnList := processColumnList(pColumnList, vTableName, vSchemaName, vKeyColumnName);

ENV_MANAGER.LOG_PROCESS_EVENT('Input column list: ' || NVL(pColumnList, 'NULL (using dynamic column list)'), 'DEBUG', vParameters);
ENV_MANAGER.LOG_PROCESS_EVENT('Processed column list: ' || vProcessedColumnList, 'DEBUG', vParameters);

-- Safe schema.table qualification via DBMS_ASSERT before any dynamic SQL use.
vTableName := DBMS_ASSERT.SCHEMA_NAME(vSchemaName) || '.' || DBMS_ASSERT.simple_sql_name(vTableName);

-- Get partitions using shared function
vPartitions := GET_PARTITIONS(vSchemaName, vTableName, vKeyColumnName, pMinDate, pMaxDate, vParameters);

ENV_MANAGER.LOG_PROCESS_EVENT('Found ' || vPartitions.COUNT || ' year/month combinations to export', 'INFO', vParameters);
ENV_MANAGER.LOG_PROCESS_EVENT('Date range: ' || TO_CHAR(pMinDate, 'YYYY-MM-DD HH24:MI:SS') || ' to ' || TO_CHAR(pMaxDate, 'YYYY-MM-DD HH24:MI:SS'), 'DEBUG', vParameters);

-- Loop over each partition and export using shared worker procedure
FOR i IN 1 .. vPartitions.COUNT LOOP
EXPORT_SINGLE_PARTITION(
pSchemaName => vSchemaName,
pTableName => vTableName,
pKeyColumnName => vKeyColumnName,
pYear => vPartitions(i).year,
pMonth => vPartitions(i).month,
pBucketUri => vBucketUri,
pFolderName => pFolderName,
pProcessedColumns => vProcessedColumnList,
pMinDate => pMinDate,
pMaxDate => pMaxDate,
pCredentialName => pCredentialName,
pFormat => 'CSV',
pFileBaseName => vFileBaseName,
pParameters => vParameters
);
END LOOP;

ENV_MANAGER.LOG_PROCESS_EVENT('Export completed successfully for ' || vPartitions.COUNT || ' files', 'INFO', vParameters);
ENV_MANAGER.LOG_PROCESS_EVENT('End','INFO',vParameters);

EXCEPTION
-- Translate known validation failures into logged, coded application errors.
WHEN ENV_MANAGER.ERR_TABLE_NOT_EXISTS THEN
vgMsgTmp := ENV_MANAGER.MSG_TABLE_NOT_EXISTS ||': '||vTableName;
ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_TABLE_NOT_EXISTS, vgMsgTmp);
WHEN ENV_MANAGER.ERR_COLUMN_NOT_EXISTS THEN
vgMsgTmp := ENV_MANAGER.MSG_COLUMN_NOT_EXISTS || ' (TableName.ColumnName): ' || vTableName||'.'||vKeyColumnName||CASE WHEN vCurrentCol IS NOT NULL THEN '.'||vCurrentCol||' in pColumnList' ELSE '' END;
ENV_MANAGER.LOG_PROCESS_EVENT(vgMsgTmp, 'ERROR', vParameters);
RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_COLUMN_NOT_EXISTS, vgMsgTmp);
WHEN OTHERS THEN
-- Log complete error details including full stack trace and backtrace
ENV_MANAGER.LOG_PROCESS_ERROR('Export failed: ' || SQLERRM, vParameters, 'DATA_EXPORTER');
ENV_MANAGER.LOG_PROCESS_EVENT(ENV_MANAGER.GET_ERROR_STACK(pFormat => 'TABLE', pCode=> SQLCODE), 'ERROR', vParameters);
RAISE_APPLICATION_ERROR(ENV_MANAGER.CODE_UNKNOWN, ENV_MANAGER.GET_ERROR_STACK(pFormat => 'OUTPUT', pCode=> SQLCODE));

END EXPORT_TABLE_DATA_TO_CSV_BY_DATE;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
-- VERSION MANAGEMENT FUNCTIONS
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
/**
 * Returns the current package version (format X.Y.Z).
 **/
FUNCTION GET_VERSION RETURN VARCHAR2 IS
    -- Single source of truth: the constant declared in the package spec.
    vVersion CONSTANT VARCHAR2(10) := PACKAGE_VERSION;
BEGIN
    RETURN vVersion;
END GET_VERSION;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
/**
 * Returns build information (version, build date, author) formatted by
 * ENV_MANAGER.GET_PACKAGE_VERSION_INFO from the spec constants.
 **/
FUNCTION GET_BUILD_INFO RETURN VARCHAR2 IS
BEGIN
    -- Delegates formatting to the central ENV_MANAGER helper.
    RETURN ENV_MANAGER.GET_PACKAGE_VERSION_INFO(pPackageName => 'DATA_EXPORTER',
                                                pVersion     => PACKAGE_VERSION,
                                                pBuildDate   => PACKAGE_BUILD_DATE,
                                                pAuthor      => PACKAGE_AUTHOR);
END GET_BUILD_INFO;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
/**
 * Returns the recent version history, formatted by
 * ENV_MANAGER.FORMAT_VERSION_HISTORY from the VERSION_HISTORY spec constant.
 **/
FUNCTION GET_VERSION_HISTORY RETURN VARCHAR2 IS
BEGIN
    -- Delegates formatting to the central ENV_MANAGER helper.
    RETURN ENV_MANAGER.FORMAT_VERSION_HISTORY(pPackageName    => 'DATA_EXPORTER',
                                              pVersionHistory => VERSION_HISTORY);
END GET_VERSION_HISTORY;
|
||||
|
||||
----------------------------------------------------------------------------------------------------
|
||||
|
||||
END;
|
||||
|
||||
/
|
||||
@@ -0,0 +1,183 @@
|
||||
create or replace PACKAGE CT_MRDS.DATA_EXPORTER
AUTHID CURRENT_USER
AS
/**
 * Data Export Package: provides comprehensive data export capabilities to various
 * formats (CSV, Parquet) with support for cloud storage integration via Oracle
 * Cloud Infrastructure (OCI).
 * The structure of this comment is used by the GET_PACKAGE_DOCUMENTATION function,
 * which returns documentation text for the Confluence page (to copy-paste it).
 **/

    -- Package version information
    PACKAGE_VERSION    CONSTANT VARCHAR2(10) := '2.2.0';
    PACKAGE_BUILD_DATE CONSTANT VARCHAR2(19) := '2025-12-19 16:00:00';
    PACKAGE_AUTHOR     CONSTANT VARCHAR2(50) := 'MRDS Development Team';

    -- Version history (last 3-5 changes)
    VERSION_HISTORY CONSTANT VARCHAR2(4000) :=
        'v2.2.0 (2025-12-19): DRY refactoring - extracted shared helper functions (sanitizeFilename, VALIDATE_TABLE_AND_COLUMNS, GET_PARTITIONS, EXPORT_SINGLE_PARTITION worker procedure). Reduced code duplication by ~400 lines. Prepared architecture for v2.3.0 parallel processing.' || CHR(10) ||
        'v2.1.1 (2025-12-04): Fixed JOIN column reference A_WORKFLOW_HISTORY_KEY -> A_ETL_LOAD_SET_KEY, added consistent column mapping and dynamic column list to EXPORT_TABLE_DATA procedure, enhanced DEBUG logging for all export operations' || CHR(10) ||
        'v2.1.0 (2025-10-22): Added version tracking and PARTITION_YEAR/PARTITION_MONTH support' || CHR(10) ||
        'v2.0.0 (2025-10-01): Separated export functionality from FILE_MANAGER package' || CHR(10) ||
        'v1.0.0 (2025-09-15): Initial implementation within FILE_MANAGER package' || CHR(10);

    cgBL     CONSTANT VARCHAR2(2) := CHR(13)||CHR(10);  -- CR+LF line break used when composing messages
    vgMsgTmp VARCHAR2(32000);                           -- scratch buffer for building log/error text

    ----------------------------------------------------------------------------
    -- TYPE DEFINITIONS FOR PARTITION HANDLING
    ----------------------------------------------------------------------------

    -- Year/month pair identifying one export partition.
    TYPE partition_rec IS RECORD (
        year  VARCHAR2(4),
        month VARCHAR2(2)
    );

    -- Collection of year/month partition records.
    TYPE partition_tab IS TABLE OF partition_rec;

    ----------------------------------------------------------------------------
    -- EXPORT PROCEDURES
    ----------------------------------------------------------------------------

    /**
     * @name EXPORT_TABLE_DATA
     * @desc Wrapper procedure for DBMS_CLOUD.EXPORT_DATA.
     *       Exports data into a CSV file on OCI infrastructure.
     *       pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
     * @example
     *   begin
     *     DATA_EXPORTER.EXPORT_TABLE_DATA(
     *       pSchemaName    => 'CT_MRDS',
     *       pTableName     => 'MY_TABLE',
     *       pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
     *       pBucketArea    => 'DATA',
     *       pFolderName    => 'csv_exports'
     *     );
     *   end;
     **/
    PROCEDURE EXPORT_TABLE_DATA (
        pSchemaName     IN VARCHAR2,
        pTableName      IN VARCHAR2,
        pKeyColumnName  IN VARCHAR2,
        pBucketArea     IN VARCHAR2,
        pFolderName     IN VARCHAR2,
        pCredentialName IN VARCHAR2 DEFAULT ENV_MANAGER.gvCredentialName
    );

    /**
     * @name EXPORT_TABLE_DATA_BY_DATE
     * @desc Wrapper procedure for DBMS_CLOUD.EXPORT_DATA.
     *       Exports data into PARQUET files on OCI infrastructure.
     *       Each YEAR_MONTH pair goes to a separate file (implicit partitioning).
     *       Allows specifying a custom column list, or uses T.* if pColumnList is NULL.
     *       Validates that all columns in pColumnList exist in the target table.
     *       Automatically adds the 'T.' prefix to column names in pColumnList.
     *       pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
     * @example
     *   begin
     *     DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
     *       pSchemaName    => 'CT_MRDS',
     *       pTableName     => 'MY_TABLE',
     *       pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
     *       pBucketArea    => 'DATA',
     *       pFolderName    => 'parquet_exports',
     *       pColumnList    => 'COLUMN1, COLUMN2, COLUMN3', -- Optional
     *       pMinDate       => DATE '2024-01-01',
     *       pMaxDate       => SYSDATE
     *     );
     *   end;
     **/
    PROCEDURE EXPORT_TABLE_DATA_BY_DATE (
        pSchemaName     IN VARCHAR2,
        pTableName      IN VARCHAR2,
        pKeyColumnName  IN VARCHAR2,
        pBucketArea     IN VARCHAR2,
        pFolderName     IN VARCHAR2,
        pColumnList     IN VARCHAR2 DEFAULT NULL,
        pMinDate        IN DATE     DEFAULT DATE '1900-01-01',
        pMaxDate        IN DATE     DEFAULT SYSDATE,
        pCredentialName IN VARCHAR2 DEFAULT ENV_MANAGER.gvCredentialName
    );

    /**
     * @name EXPORT_TABLE_DATA_TO_CSV_BY_DATE
     * @desc Exports data to separate CSV files partitioned by year and month.
     *       Creates one CSV file for each year/month combination found in the data.
     *       Uses the same date filtering mechanism with CT_ODS.A_LOAD_HISTORY as
     *       EXPORT_TABLE_DATA_BY_DATE, but exports to CSV format instead of Parquet.
     *       File naming pattern: {pFileName}_YYYYMM.csv or {TABLENAME}_YYYYMM.csv
     *       (if pFileName is NULL).
     *       pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
     * @example
     *   begin
     *     -- With custom filename
     *     DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
     *       pSchemaName    => 'CT_MRDS',
     *       pTableName     => 'MY_TABLE',
     *       pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
     *       pBucketArea    => 'DATA',
     *       pFolderName    => 'exports',
     *       pFileName      => 'my_export.csv',
     *       pMinDate       => DATE '2024-01-01',
     *       pMaxDate       => SYSDATE
     *     );
     *
     *     -- With auto-generated filename (based on table name only)
     *     DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
     *       pSchemaName    => 'OU_TOP',
     *       pTableName     => 'AGGREGATED_ALLOTMENT',
     *       pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
     *       pBucketArea    => 'ARCHIVE',
     *       pFolderName    => 'exports',
     *       pMinDate       => DATE '2025-09-01',
     *       pMaxDate       => DATE '2025-09-17'
     *     );
     *     -- This will create files like: AGGREGATED_ALLOTMENT_202509.csv, etc.
     *   end;
     **/
    PROCEDURE EXPORT_TABLE_DATA_TO_CSV_BY_DATE (
        pSchemaName     IN VARCHAR2,
        pTableName      IN VARCHAR2,
        pKeyColumnName  IN VARCHAR2,
        pBucketArea     IN VARCHAR2,
        pFolderName     IN VARCHAR2,
        pFileName       IN VARCHAR2 DEFAULT NULL,
        pColumnList     IN VARCHAR2 DEFAULT NULL,
        pMinDate        IN DATE     DEFAULT DATE '1900-01-01',
        pMaxDate        IN DATE     DEFAULT SYSDATE,
        pCredentialName IN VARCHAR2 DEFAULT ENV_MANAGER.gvCredentialName
    );

    ----------------------------------------------------------------------------
    -- VERSION MANAGEMENT FUNCTIONS
    ----------------------------------------------------------------------------

    -- Returns the current package version number (format X.Y.Z, e.g. '2.1.0').
    FUNCTION GET_VERSION RETURN VARCHAR2;

    -- Returns comprehensive build information including version, date, and author.
    FUNCTION GET_BUILD_INFO RETURN VARCHAR2;

    -- Returns the version history with recent changes (multi-line string).
    FUNCTION GET_VERSION_HISTORY RETURN VARCHAR2;

END;
/
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,209 @@
|
||||
create or replace PACKAGE CT_MRDS.DATA_EXPORTER
AUTHID CURRENT_USER
AS
/**
 * Data Export Package: provides comprehensive data export capabilities to various
 * formats (CSV, Parquet) with support for cloud storage integration via Oracle
 * Cloud Infrastructure (OCI).
 * The structure of this comment is used by the GET_PACKAGE_DOCUMENTATION function,
 * which returns documentation text for the Confluence page (to copy-paste it).
 **/

    -- Package version information
    PACKAGE_VERSION    CONSTANT VARCHAR2(10) := '2.3.0';
    PACKAGE_BUILD_DATE CONSTANT VARCHAR2(19) := '2025-12-20 10:00:00';
    PACKAGE_AUTHOR     CONSTANT VARCHAR2(50) := 'MRDS Development Team';

    -- Version history (last 3-5 changes)
    VERSION_HISTORY CONSTANT VARCHAR2(4000) :=
        'v2.3.0 (2025-12-20): Added parallel partition processing using DBMS_PARALLEL_EXECUTE. New pParallelDegree parameter (1-16, default 1) for EXPORT_TABLE_DATA_BY_DATE and EXPORT_TABLE_DATA_TO_CSV_BY_DATE procedures. Each year/month partition processed in separate thread for improved performance.' || CHR(10) ||
        'v2.2.0 (2025-12-19): DRY refactoring - extracted shared helper functions (sanitizeFilename, VALIDATE_TABLE_AND_COLUMNS, GET_PARTITIONS, EXPORT_SINGLE_PARTITION worker procedure). Reduced code duplication by ~400 lines. Prepared architecture for v2.3.0 parallel processing.' || CHR(10) ||
        'v2.1.1 (2025-12-04): Fixed JOIN column reference A_WORKFLOW_HISTORY_KEY -> A_ETL_LOAD_SET_KEY, added consistent column mapping and dynamic column list to EXPORT_TABLE_DATA procedure, enhanced DEBUG logging for all export operations' || CHR(10) ||
        'v2.1.0 (2025-10-22): Added version tracking and PARTITION_YEAR/PARTITION_MONTH support' || CHR(10) ||
        'v2.0.0 (2025-10-01): Separated export functionality from FILE_MANAGER package' || CHR(10);

    cgBL     CONSTANT VARCHAR2(2) := CHR(13)||CHR(10);  -- CR+LF line break used when composing messages
    vgMsgTmp VARCHAR2(32000);                           -- scratch buffer for building log/error text

    ----------------------------------------------------------------------------
    -- TYPE DEFINITIONS FOR PARTITION HANDLING
    ----------------------------------------------------------------------------

    -- Year/month pair identifying one export partition.
    TYPE partition_rec IS RECORD (
        year  VARCHAR2(4),
        month VARCHAR2(2)
    );

    -- Collection of year/month partition records.
    TYPE partition_tab IS TABLE OF partition_rec;

    ----------------------------------------------------------------------------
    -- INTERNAL PARALLEL PROCESSING CALLBACK
    ----------------------------------------------------------------------------

    /**
     * @name EXPORT_PARTITION_PARALLEL
     * @desc Internal callback procedure for DBMS_PARALLEL_EXECUTE.
     *       Processes a single partition (year/month) chunk in a parallel task.
     *       Called by the DBMS_PARALLEL_EXECUTE framework for each chunk.
     *       This procedure is PUBLIC because DBMS_PARALLEL_EXECUTE requires it,
     *       but should NOT be called directly by external code.
     * @param pStartId - Chunk start ID (CHUNK_ID from A_PARALLEL_EXPORT_CHUNKS table)
     * @param pEndId   - Chunk end ID (same as pStartId for single-row chunks)
     **/
    PROCEDURE EXPORT_PARTITION_PARALLEL (
        pStartId IN NUMBER,
        pEndId   IN NUMBER
    );

    ----------------------------------------------------------------------------
    -- MAIN EXPORT PROCEDURES
    ----------------------------------------------------------------------------

    /**
     * @name EXPORT_TABLE_DATA
     * @desc Wrapper procedure for DBMS_CLOUD.EXPORT_DATA.
     *       Exports data into a CSV file on OCI infrastructure.
     *       pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
     * @example
     *   begin
     *     DATA_EXPORTER.EXPORT_TABLE_DATA(
     *       pSchemaName    => 'CT_MRDS',
     *       pTableName     => 'MY_TABLE',
     *       pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
     *       pBucketArea    => 'DATA',
     *       pFolderName    => 'csv_exports'
     *     );
     *   end;
     **/
    PROCEDURE EXPORT_TABLE_DATA (
        pSchemaName     IN VARCHAR2,
        pTableName      IN VARCHAR2,
        pKeyColumnName  IN VARCHAR2,
        pBucketArea     IN VARCHAR2,
        pFolderName     IN VARCHAR2,
        pCredentialName IN VARCHAR2 DEFAULT ENV_MANAGER.gvCredentialName
    );

    /**
     * @name EXPORT_TABLE_DATA_BY_DATE
     * @desc Wrapper procedure for DBMS_CLOUD.EXPORT_DATA.
     *       Exports data into PARQUET files on OCI infrastructure.
     *       Each YEAR_MONTH pair goes to a separate file (implicit partitioning).
     *       Allows specifying a custom column list, or uses T.* if pColumnList is NULL.
     *       Validates that all columns in pColumnList exist in the target table.
     *       Automatically adds the 'T.' prefix to column names in pColumnList.
     *       Supports parallel partition processing via the pParallelDegree
     *       parameter (default 1, range 1-16).
     *       pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
     * @example
     *   begin
     *     DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
     *       pSchemaName     => 'CT_MRDS',
     *       pTableName      => 'MY_TABLE',
     *       pKeyColumnName  => 'A_ETL_LOAD_SET_KEY_FK',
     *       pBucketArea     => 'DATA',
     *       pFolderName     => 'parquet_exports',
     *       pColumnList     => 'COLUMN1, COLUMN2, COLUMN3', -- Optional
     *       pMinDate        => DATE '2024-01-01',
     *       pMaxDate        => SYSDATE,
     *       pParallelDegree => 8 -- Optional, default 1, range 1-16
     *     );
     *   end;
     **/
    PROCEDURE EXPORT_TABLE_DATA_BY_DATE (
        pSchemaName     IN VARCHAR2,
        pTableName      IN VARCHAR2,
        pKeyColumnName  IN VARCHAR2,
        pBucketArea     IN VARCHAR2,
        pFolderName     IN VARCHAR2,
        pColumnList     IN VARCHAR2 DEFAULT NULL,
        pMinDate        IN DATE     DEFAULT DATE '1900-01-01',
        pMaxDate        IN DATE     DEFAULT SYSDATE,
        pParallelDegree IN NUMBER   DEFAULT 1,
        pCredentialName IN VARCHAR2 DEFAULT ENV_MANAGER.gvCredentialName
    );

    /**
     * @name EXPORT_TABLE_DATA_TO_CSV_BY_DATE
     * @desc Exports data to separate CSV files partitioned by year and month.
     *       Creates one CSV file for each year/month combination found in the data.
     *       Uses the same date filtering mechanism with CT_ODS.A_LOAD_HISTORY as
     *       EXPORT_TABLE_DATA_BY_DATE, but exports to CSV format instead of Parquet.
     *       Supports parallel partition processing via pParallelDegree (1-16).
     *       File naming pattern: {pFileName}_YYYYMM.csv or {TABLENAME}_YYYYMM.csv
     *       (if pFileName is NULL).
     *       pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
     * @example
     *   begin
     *     -- With custom filename
     *     DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
     *       pSchemaName     => 'CT_MRDS',
     *       pTableName      => 'MY_TABLE',
     *       pKeyColumnName  => 'A_ETL_LOAD_SET_KEY_FK',
     *       pBucketArea     => 'DATA',
     *       pFolderName     => 'exports',
     *       pFileName       => 'my_export.csv',
     *       pMinDate        => DATE '2024-01-01',
     *       pMaxDate        => SYSDATE,
     *       pParallelDegree => 8 -- Optional, default 1, range 1-16
     *     );
     *
     *     -- With auto-generated filename (based on table name only)
     *     DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
     *       pSchemaName    => 'OU_TOP',
     *       pTableName     => 'AGGREGATED_ALLOTMENT',
     *       pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
     *       pBucketArea    => 'ARCHIVE',
     *       pFolderName    => 'exports',
     *       pMinDate       => DATE '2025-09-01',
     *       pMaxDate       => DATE '2025-09-17'
     *     );
     *     -- This will create files like: AGGREGATED_ALLOTMENT_202509.csv, etc.
     *   end;
     **/
    PROCEDURE EXPORT_TABLE_DATA_TO_CSV_BY_DATE (
        pSchemaName     IN VARCHAR2,
        pTableName      IN VARCHAR2,
        pKeyColumnName  IN VARCHAR2,
        pBucketArea     IN VARCHAR2,
        pFolderName     IN VARCHAR2,
        pFileName       IN VARCHAR2 DEFAULT NULL,
        pColumnList     IN VARCHAR2 DEFAULT NULL,
        pMinDate        IN DATE     DEFAULT DATE '1900-01-01',
        pMaxDate        IN DATE     DEFAULT SYSDATE,
        pParallelDegree IN NUMBER   DEFAULT 1,
        pCredentialName IN VARCHAR2 DEFAULT ENV_MANAGER.gvCredentialName
    );

    ----------------------------------------------------------------------------
    -- VERSION MANAGEMENT FUNCTIONS
    ----------------------------------------------------------------------------

    -- Returns the current package version number (format X.Y.Z, e.g. '2.1.0').
    FUNCTION GET_VERSION RETURN VARCHAR2;

    -- Returns comprehensive build information including version, date, and author.
    FUNCTION GET_BUILD_INFO RETURN VARCHAR2;

    -- Returns the version history with recent changes (multi-line string).
    FUNCTION GET_VERSION_HISTORY RETURN VARCHAR2;

END;
/
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,213 @@
|
||||
create or replace PACKAGE CT_MRDS.DATA_EXPORTER
AUTHID CURRENT_USER
AS
/**
 * Data Export Package: provides comprehensive data export capabilities to various
 * formats (CSV, Parquet) with support for cloud storage integration via Oracle
 * Cloud Infrastructure (OCI).
 * The structure of this comment is used by the GET_PACKAGE_DOCUMENTATION function,
 * which returns documentation text for the Confluence page (to copy-paste it).
 **/

    -- Package version information
    PACKAGE_VERSION    CONSTANT VARCHAR2(10) := '2.4.0';
    PACKAGE_BUILD_DATE CONSTANT VARCHAR2(19) := '2026-01-09 14:00:00';
    PACKAGE_AUTHOR     CONSTANT VARCHAR2(50) := 'MRDS Development Team';

    -- Version history (last 3-5 changes)
    VERSION_HISTORY CONSTANT VARCHAR2(4000) :=
        'v2.4.0 (2026-01-09): Added Smart Column Mapping for CSV exports. New optional parameters pTargetTableOwner and pTargetTableName enable automatic column order mapping from source table to external table structure, solving Oracle External Tables CSV positional mapping issue. Backward compatible (works without new parameters).' || CHR(10) ||
        'v2.3.0 (2025-12-20): Added parallel partition processing using DBMS_PARALLEL_EXECUTE. New pParallelDegree parameter (1-16, default 1) for EXPORT_TABLE_DATA_BY_DATE and EXPORT_TABLE_DATA_TO_CSV_BY_DATE procedures. Each year/month partition processed in separate thread for improved performance.' || CHR(10) ||
        'v2.2.0 (2025-12-19): DRY refactoring - extracted shared helper functions (sanitizeFilename, VALIDATE_TABLE_AND_COLUMNS, GET_PARTITIONS, EXPORT_SINGLE_PARTITION worker procedure). Reduced code duplication by ~400 lines. Prepared architecture for v2.3.0 parallel processing.' || CHR(10) ||
        'v2.1.1 (2025-12-04): Fixed JOIN column reference A_WORKFLOW_HISTORY_KEY -> A_ETL_LOAD_SET_KEY, added consistent column mapping and dynamic column list to EXPORT_TABLE_DATA procedure, enhanced DEBUG logging for all export operations' || CHR(10) ||
        'v2.1.0 (2025-10-22): Added version tracking and PARTITION_YEAR/PARTITION_MONTH support' || CHR(10);

    cgBL     CONSTANT VARCHAR2(2) := CHR(13)||CHR(10);  -- CR+LF line break used when composing messages
    vgMsgTmp VARCHAR2(32000);                           -- scratch buffer for building log/error text

    ----------------------------------------------------------------------------
    -- TYPE DEFINITIONS FOR PARTITION HANDLING
    ----------------------------------------------------------------------------

    -- Year/month pair identifying one export partition.
    TYPE partition_rec IS RECORD (
        year  VARCHAR2(4),
        month VARCHAR2(2)
    );

    -- Collection of year/month partition records.
    TYPE partition_tab IS TABLE OF partition_rec;

    ----------------------------------------------------------------------------
    -- INTERNAL PARALLEL PROCESSING CALLBACK
    ----------------------------------------------------------------------------

    /**
     * @name EXPORT_PARTITION_PARALLEL
     * @desc Internal callback procedure for DBMS_PARALLEL_EXECUTE.
     *       Processes a single partition (year/month) chunk in a parallel task.
     *       Called by the DBMS_PARALLEL_EXECUTE framework for each chunk.
     *       This procedure is PUBLIC because DBMS_PARALLEL_EXECUTE requires it,
     *       but should NOT be called directly by external code.
     * @param pStartId - Chunk start ID (CHUNK_ID from A_PARALLEL_EXPORT_CHUNKS table)
     * @param pEndId   - Chunk end ID (same as pStartId for single-row chunks)
     **/
    PROCEDURE EXPORT_PARTITION_PARALLEL (
        pStartId IN NUMBER,
        pEndId   IN NUMBER
    );

    ----------------------------------------------------------------------------
    -- MAIN EXPORT PROCEDURES
    ----------------------------------------------------------------------------

    /**
     * @name EXPORT_TABLE_DATA
     * @desc Wrapper procedure for DBMS_CLOUD.EXPORT_DATA.
     *       Exports data into a CSV file on OCI infrastructure.
     *       pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
     * @example
     *   begin
     *     DATA_EXPORTER.EXPORT_TABLE_DATA(
     *       pSchemaName    => 'CT_MRDS',
     *       pTableName     => 'MY_TABLE',
     *       pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
     *       pBucketArea    => 'DATA',
     *       pFolderName    => 'csv_exports'
     *     );
     *   end;
     **/
    PROCEDURE EXPORT_TABLE_DATA (
        pSchemaName     IN VARCHAR2,
        pTableName      IN VARCHAR2,
        pKeyColumnName  IN VARCHAR2,
        pBucketArea     IN VARCHAR2,
        pFolderName     IN VARCHAR2,
        pCredentialName IN VARCHAR2 DEFAULT ENV_MANAGER.gvCredentialName
    );

    /**
     * @name EXPORT_TABLE_DATA_BY_DATE
     * @desc Wrapper procedure for DBMS_CLOUD.EXPORT_DATA.
     *       Exports data into PARQUET files on OCI infrastructure.
     *       Each YEAR_MONTH pair goes to a separate file (implicit partitioning).
     *       Allows specifying a custom column list, or uses T.* if pColumnList is NULL.
     *       Validates that all columns in pColumnList exist in the target table.
     *       Automatically adds the 'T.' prefix to column names in pColumnList.
     *       Supports parallel partition processing via the pParallelDegree
     *       parameter (default 1, range 1-16).
     *       pTargetTableOwner/pTargetTableName optionally enable Smart Column
     *       Mapping (see v2.4.0 history entry); both default to NULL.
     *       pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
     * @example
     *   begin
     *     DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
     *       pSchemaName     => 'CT_MRDS',
     *       pTableName      => 'MY_TABLE',
     *       pKeyColumnName  => 'A_ETL_LOAD_SET_KEY_FK',
     *       pBucketArea     => 'DATA',
     *       pFolderName     => 'parquet_exports',
     *       pColumnList     => 'COLUMN1, COLUMN2, COLUMN3', -- Optional
     *       pMinDate        => DATE '2024-01-01',
     *       pMaxDate        => SYSDATE,
     *       pParallelDegree => 8 -- Optional, default 1, range 1-16
     *     );
     *   end;
     **/
    PROCEDURE EXPORT_TABLE_DATA_BY_DATE (
        pSchemaName       IN VARCHAR2,
        pTableName        IN VARCHAR2,
        pKeyColumnName    IN VARCHAR2,
        pBucketArea       IN VARCHAR2,
        pFolderName       IN VARCHAR2,
        pColumnList       IN VARCHAR2 DEFAULT NULL,
        pMinDate          IN DATE     DEFAULT DATE '1900-01-01',
        pMaxDate          IN DATE     DEFAULT SYSDATE,
        pParallelDegree   IN NUMBER   DEFAULT 1,
        pTargetTableOwner IN VARCHAR2 DEFAULT NULL,
        pTargetTableName  IN VARCHAR2 DEFAULT NULL,
        pCredentialName   IN VARCHAR2 DEFAULT ENV_MANAGER.gvCredentialName
    );

    /**
     * @name EXPORT_TABLE_DATA_TO_CSV_BY_DATE
     * @desc Exports data to separate CSV files partitioned by year and month.
     *       Creates one CSV file for each year/month combination found in the data.
     *       Uses the same date filtering mechanism with CT_ODS.A_LOAD_HISTORY as
     *       EXPORT_TABLE_DATA_BY_DATE, but exports to CSV format instead of Parquet.
     *       Supports parallel partition processing via pParallelDegree (1-16).
     *       pTargetTableOwner/pTargetTableName optionally enable Smart Column
     *       Mapping (see v2.4.0 history entry); both default to NULL.
     *       File naming pattern: {pFileName}_YYYYMM.csv or {TABLENAME}_YYYYMM.csv
     *       (if pFileName is NULL).
     *       pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
     * @example
     *   begin
     *     -- With custom filename
     *     DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
     *       pSchemaName     => 'CT_MRDS',
     *       pTableName      => 'MY_TABLE',
     *       pKeyColumnName  => 'A_ETL_LOAD_SET_KEY_FK',
     *       pBucketArea     => 'DATA',
     *       pFolderName     => 'exports',
     *       pFileName       => 'my_export.csv',
     *       pMinDate        => DATE '2024-01-01',
     *       pMaxDate        => SYSDATE,
     *       pParallelDegree => 8 -- Optional, default 1, range 1-16
     *     );
     *
     *     -- With auto-generated filename (based on table name only)
     *     DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
     *       pSchemaName    => 'OU_TOP',
     *       pTableName     => 'AGGREGATED_ALLOTMENT',
     *       pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
     *       pBucketArea    => 'ARCHIVE',
     *       pFolderName    => 'exports',
     *       pMinDate       => DATE '2025-09-01',
     *       pMaxDate       => DATE '2025-09-17'
     *     );
     *     -- This will create files like: AGGREGATED_ALLOTMENT_202509.csv, etc.
     *   end;
     **/
    PROCEDURE EXPORT_TABLE_DATA_TO_CSV_BY_DATE (
        pSchemaName       IN VARCHAR2,
        pTableName        IN VARCHAR2,
        pKeyColumnName    IN VARCHAR2,
        pBucketArea       IN VARCHAR2,
        pFolderName       IN VARCHAR2,
        pFileName         IN VARCHAR2 DEFAULT NULL,
        pColumnList       IN VARCHAR2 DEFAULT NULL,
        pMinDate          IN DATE     DEFAULT DATE '1900-01-01',
        pMaxDate          IN DATE     DEFAULT SYSDATE,
        pParallelDegree   IN NUMBER   DEFAULT 1,
        pTargetTableOwner IN VARCHAR2 DEFAULT NULL,
        pTargetTableName  IN VARCHAR2 DEFAULT NULL,
        pCredentialName   IN VARCHAR2 DEFAULT ENV_MANAGER.gvCredentialName
    );

    ----------------------------------------------------------------------------
    -- VERSION MANAGEMENT FUNCTIONS
    ----------------------------------------------------------------------------

    -- Returns the current package version number (format X.Y.Z, e.g. '2.1.0').
    FUNCTION GET_VERSION RETURN VARCHAR2;

    -- Returns comprehensive build information including version, date, and author.
    FUNCTION GET_BUILD_INFO RETURN VARCHAR2;

    -- Returns the version history with recent changes (multi-line string).
    FUNCTION GET_VERSION_HISTORY RETURN VARCHAR2;

END;
/
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,213 @@
|
||||
create or replace PACKAGE CT_MRDS.DATA_EXPORTER
AUTHID CURRENT_USER
AS
/**
 * Data Export Package: Provides comprehensive data export capabilities to various formats (CSV, Parquet)
 * with support for cloud storage integration via Oracle Cloud Infrastructure (OCI).
 * The structure of comment is used by GET_PACKAGE_DOCUMENTATION function
 * which returns documentation text for confluence page (to Copy-Paste it).
 **/

-- Package Version Information (exposed via GET_VERSION / GET_BUILD_INFO)
PACKAGE_VERSION CONSTANT VARCHAR2(10) := '2.4.0';
PACKAGE_BUILD_DATE CONSTANT VARCHAR2(19) := '2026-01-11 18:00:00';
PACKAGE_AUTHOR CONSTANT VARCHAR2(50) := 'MRDS Development Team';

-- Version History (last 3-5 changes); exposed via GET_VERSION_HISTORY
VERSION_HISTORY CONSTANT VARCHAR2(4000) :=
'v2.4.0 (2026-01-11): Added pTemplateTableName parameter for per-column date format configuration. Implements dynamic query building with TO_CHAR for each date/timestamp column using FILE_MANAGER.GET_DATE_FORMAT. Supports 3-tier hierarchy: column-specific, template DEFAULT, global fallback. Eliminates single dateformat limitation of DBMS_CLOUD.EXPORT_DATA.' || CHR(10) ||
'v2.3.0 (2025-12-20): Added parallel partition processing using DBMS_PARALLEL_EXECUTE. New pParallelDegree parameter (1-16, default 1) for EXPORT_TABLE_DATA_BY_DATE and EXPORT_TABLE_DATA_TO_CSV_BY_DATE procedures. Each year/month partition processed in separate thread for improved performance.' || CHR(10) ||
'v2.2.0 (2025-12-19): DRY refactoring - extracted shared helper functions (sanitizeFilename, VALIDATE_TABLE_AND_COLUMNS, GET_PARTITIONS, EXPORT_SINGLE_PARTITION worker procedure). Reduced code duplication by ~400 lines. Prepared architecture for v2.3.0 parallel processing.' || CHR(10) ||
'v2.1.1 (2025-12-04): Fixed JOIN column reference A_WORKFLOW_HISTORY_KEY -> A_ETL_LOAD_SET_KEY, added consistent column mapping and dynamic column list to EXPORT_TABLE_DATA procedure, enhanced DEBUG logging for all export operations' || CHR(10) ||
'v2.1.0 (2025-10-22): Added version tracking and PARTITION_YEAR/PARTITION_MONTH support' || CHR(10) ||
'v2.0.0 (2025-10-01): Separated export functionality from FILE_MANAGER package' || CHR(10);

cgBL CONSTANT VARCHAR2(2) := CHR(13)||CHR(10);  -- CRLF line break used when composing messages
vgMsgTmp VARCHAR2(32000);                       -- scratch buffer for message assembly

---------------------------------------------------------------------------------------------------------------------------
-- TYPE DEFINITIONS FOR PARTITION HANDLING
---------------------------------------------------------------------------------------------------------------------------

/**
 * Record type for year/month partition information
 **/
TYPE partition_rec IS RECORD (
    year VARCHAR2(4),
    month VARCHAR2(2)
);

/**
 * Table type for collection of partition records
 **/
TYPE partition_tab IS TABLE OF partition_rec;

---------------------------------------------------------------------------------------------------------------------------
-- INTERNAL PARALLEL PROCESSING CALLBACK
---------------------------------------------------------------------------------------------------------------------------

/**
 * @name EXPORT_PARTITION_PARALLEL
 * @desc Internal callback procedure for DBMS_PARALLEL_EXECUTE.
 * Processes single partition (year/month) chunk in parallel task.
 * Called by DBMS_PARALLEL_EXECUTE framework for each chunk.
 * This procedure is PUBLIC because DBMS_PARALLEL_EXECUTE requires it,
 * but should NOT be called directly by external code.
 * @param pStartId - Chunk start ID (CHUNK_ID from A_PARALLEL_EXPORT_CHUNKS table)
 * @param pEndId - Chunk end ID (same as pStartId for single-row chunks)
 **/
PROCEDURE EXPORT_PARTITION_PARALLEL (
    pStartId IN NUMBER,
    pEndId IN NUMBER
);

---------------------------------------------------------------------------------------------------------------------------
-- MAIN EXPORT PROCEDURES
---------------------------------------------------------------------------------------------------------------------------

/**
 * @name EXPORT_TABLE_DATA
 * @desc Wrapper procedure for DBMS_CLOUD.EXPORT_DATA.
 * Exports data into CSV file on OCI infrastructure.
 * pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
 * @example
 * begin
 *   DATA_EXPORTER.EXPORT_TABLE_DATA(
 *     pSchemaName    => 'CT_MRDS',
 *     pTableName     => 'MY_TABLE',
 *     pKeyColumnName => 'A_ETL_LOAD_SET_KEY_FK',
 *     pBucketArea    => 'DATA',
 *     pFolderName    => 'csv_exports'
 *   );
 * end;
 **/
PROCEDURE EXPORT_TABLE_DATA (
    pSchemaName IN VARCHAR2,
    pTableName IN VARCHAR2,
    pKeyColumnName IN VARCHAR2,
    pBucketArea IN VARCHAR2,
    pFolderName IN VARCHAR2,
    pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
);

/**
 * @name EXPORT_TABLE_DATA_BY_DATE
 * @desc Wrapper procedure for DBMS_CLOUD.EXPORT_DATA.
 * Exports data into PARQUET files on OCI infrastructure.
 * Each YEAR_MONTH pair goes to separate file (implicit partitioning).
 * Allows specifying custom column list or uses T.* if pColumnList is NULL.
 * Validates that all columns in pColumnList exist in the target table.
 * Automatically adds 'T.' prefix to column names in pColumnList.
 * Supports parallel partition processing via pParallelDegree parameter (default 1, range 1-16).
 * pTemplateTableName (v2.4.0) selects per-column date formats and column order from a template table.
 * pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
 * @example
 * begin
 *   DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
 *     pSchemaName     => 'CT_MRDS',
 *     pTableName      => 'MY_TABLE',
 *     pKeyColumnName  => 'A_ETL_LOAD_SET_KEY_FK',
 *     pBucketArea     => 'DATA',
 *     pFolderName     => 'parquet_exports',
 *     pColumnList     => 'COLUMN1, COLUMN2, COLUMN3', -- Optional
 *     pMinDate        => DATE '2024-01-01',
 *     pMaxDate        => SYSDATE,
 *     pParallelDegree => 8 -- Optional, default 1, range 1-16
 *   );
 * end;
 **/
PROCEDURE EXPORT_TABLE_DATA_BY_DATE (
    pSchemaName IN VARCHAR2,
    pTableName IN VARCHAR2,
    pKeyColumnName IN VARCHAR2,
    pBucketArea IN VARCHAR2,
    pFolderName IN VARCHAR2,
    pColumnList IN VARCHAR2 default NULL,
    pMinDate IN DATE default DATE '1900-01-01',
    pMaxDate IN DATE default SYSDATE,
    pParallelDegree IN NUMBER default 1,
    pTemplateTableName IN VARCHAR2 default NULL,
    pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
);

/**
 * @name EXPORT_TABLE_DATA_TO_CSV_BY_DATE
 * @desc Exports data to separate CSV files partitioned by year and month.
 * Creates one CSV file for each year/month combination found in the data.
 * Uses the same date filtering mechanism with CT_ODS.A_LOAD_HISTORY as EXPORT_TABLE_DATA_BY_DATE,
 * but exports to CSV format instead of Parquet.
 * Supports parallel partition processing via pParallelDegree parameter (1-16).
 * pTemplateTableName (v2.4.0) selects per-column date formats and column order from a template table.
 * pMaxFileSize caps the size of a single generated file (bytes; default 104857600 = 100MB).
 * File naming pattern: {pFileName}_YYYYMM.csv or {TABLENAME}_YYYYMM.csv (if pFileName is NULL)
 * pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'
 * @example
 * begin
 *   -- With custom filename
 *   DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
 *     pSchemaName     => 'CT_MRDS',
 *     pTableName      => 'MY_TABLE',
 *     pKeyColumnName  => 'A_ETL_LOAD_SET_KEY_FK',
 *     pBucketArea     => 'DATA',
 *     pFolderName     => 'exports',
 *     pFileName       => 'my_export.csv',
 *     pMinDate        => DATE '2024-01-01',
 *     pMaxDate        => SYSDATE,
 *     pParallelDegree => 8 -- Optional, default 1, range 1-16
 *   );
 *
 *   -- With auto-generated filename (based on table name only)
 *   DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
 *     pSchemaName     => 'OU_TOP',
 *     pTableName      => 'AGGREGATED_ALLOTMENT',
 *     pKeyColumnName  => 'A_ETL_LOAD_SET_KEY_FK',
 *     pBucketArea     => 'ARCHIVE',
 *     pFolderName     => 'exports',
 *     pMinDate        => DATE '2025-09-01',
 *     pMaxDate        => DATE '2025-09-17'
 *   );
 *   -- This will create files like: AGGREGATED_ALLOTMENT_202509.csv, etc.
 * end;
 **/
PROCEDURE EXPORT_TABLE_DATA_TO_CSV_BY_DATE (
    pSchemaName IN VARCHAR2,
    pTableName IN VARCHAR2,
    pKeyColumnName IN VARCHAR2,
    pBucketArea IN VARCHAR2,
    pFolderName IN VARCHAR2,
    pFileName IN VARCHAR2 DEFAULT NULL,
    pColumnList IN VARCHAR2 default NULL,
    pMinDate IN DATE default DATE '1900-01-01',
    pMaxDate IN DATE default SYSDATE,
    pParallelDegree IN NUMBER default 1,
    pTemplateTableName IN VARCHAR2 default NULL,
    pMaxFileSize IN NUMBER default 104857600,
    pCredentialName IN VARCHAR2 default ENV_MANAGER.gvCredentialName
);

---------------------------------------------------------------------------------------------------------------------------
-- VERSION MANAGEMENT FUNCTIONS
---------------------------------------------------------------------------------------------------------------------------

/**
 * Returns the current package version number
 * return: Version string in format X.Y.Z (e.g., '2.1.0')
 **/
FUNCTION GET_VERSION RETURN VARCHAR2;

/**
 * Returns comprehensive build information including version, date, and author
 * return: Formatted string with complete build details
 **/
FUNCTION GET_BUILD_INFO RETURN VARCHAR2;

/**
 * Returns the version history with recent changes
 * return: Multi-line string with version history
 **/
FUNCTION GET_VERSION_HISTORY RETURN VARCHAR2;

END;

/
|
||||
@@ -0,0 +1,87 @@
|
||||
-- ===================================================================
|
||||
-- Simple Package Version Tracking Script
|
||||
-- ===================================================================
|
||||
-- Purpose: Track specified Oracle package versions
|
||||
-- Author: Grzegorz Michalski
|
||||
-- Date: 2025-12-19
|
||||
-- Version: 3.1.0 - List-Based Edition
|
||||
--
|
||||
-- USAGE:
|
||||
-- 1. Edit package list below (add/remove packages as needed)
|
||||
-- 2. Include in your install/rollback script: @@track_package_versions.sql
|
||||
-- ===================================================================
|
||||
|
||||
SET SERVEROUTPUT ON;
|
||||
|
||||
-- Tracks the version of each listed package by calling its GET_VERSION function
-- and recording it via CT_MRDS.ENV_MANAGER.TRACK_PACKAGE_VERSION.
-- Failures for one package are reported and do not stop the remaining packages.
DECLARE
    TYPE t_string_array IS TABLE OF VARCHAR2(100);

    -- ===================================================================
    -- PACKAGE LIST - Edit this array to specify packages to track
    -- ===================================================================
    -- Add or remove entries as needed for your MARS issue
    -- Format: 'SCHEMA.PACKAGE_NAME'
    -- ===================================================================
    vPackageList t_string_array := t_string_array(
        'CT_MRDS.ENV_MANAGER',
        'CT_MRDS.DATA_EXPORTER'
    );
    -- ===================================================================

    vVersion     VARCHAR2(50);
    vCount       NUMBER := 0;        -- packages tracked successfully
    vOwner       VARCHAR2(50);
    vPackageName VARCHAR2(50);
    vDotPos      NUMBER;
BEGIN
    DBMS_OUTPUT.PUT_LINE('========================================');
    DBMS_OUTPUT.PUT_LINE('Package Version Tracking');
    DBMS_OUTPUT.PUT_LINE('========================================');

    -- Process each package in the list
    FOR i IN 1..vPackageList.COUNT LOOP
        vDotPos := INSTR(vPackageList(i), '.');
        IF vDotPos > 0 THEN
            vOwner       := SUBSTR(vPackageList(i), 1, vDotPos - 1);
            vPackageName := SUBSTR(vPackageList(i), vDotPos + 1);
        ELSE
            vOwner       := USER;  -- Default to current user if no schema specified
            vPackageName := vPackageList(i);
        END IF;

        BEGIN
            -- Get package version. Identifiers are validated with DBMS_ASSERT
            -- before being concatenated into dynamic SQL; an invalid entry in
            -- the list raises and is reported by the handler below.
            EXECUTE IMMEDIATE
                'SELECT ' || DBMS_ASSERT.SIMPLE_SQL_NAME(vOwner) || '.'
                          || DBMS_ASSERT.SIMPLE_SQL_NAME(vPackageName)
                          || '.GET_VERSION() FROM DUAL'
                INTO vVersion;

            -- Track the version
            CT_MRDS.ENV_MANAGER.TRACK_PACKAGE_VERSION(
                pPackageOwner     => vOwner,
                pPackageName      => vPackageName,
                pPackageVersion   => vVersion,
                pPackageBuildDate => NULL,  -- Will be retrieved from package
                pPackageAuthor    => NULL   -- Will be retrieved from package
            );

            DBMS_OUTPUT.PUT_LINE('SUCCESS: Tracked ' || vOwner || '.' || vPackageName || ' v' || vVersion);
            vCount := vCount + 1;

        EXCEPTION
            WHEN OTHERS THEN
                DBMS_OUTPUT.PUT_LINE('ERROR tracking ' || vOwner || '.' || vPackageName || ': ' || SQLERRM);
        END;
    END LOOP;

    DBMS_OUTPUT.PUT_LINE('========================================');
    DBMS_OUTPUT.PUT_LINE('Tracked ' || vCount || ' of ' || vPackageList.COUNT || ' packages successfully');
    DBMS_OUTPUT.PUT_LINE('========================================');
END;
/
|
||||
@@ -0,0 +1,62 @@
|
||||
-- ===================================================================
-- Universal Package Version Verification Script
-- ===================================================================
-- Purpose: Verify all tracked Oracle packages for code changes
-- Author:  Grzegorz Michalski
-- Date:    2025-12-19
-- Version: 1.0.0
--
-- USAGE:
--   Include at the end of install/rollback scripts: @@verify_packages_version.sql
--
-- OUTPUT:
--   - List of all tracked packages with their current status
--   - OK: Package has not changed since last tracking
--   - WARNING: Package code changed without version update
-- ===================================================================

SET LINESIZE 200
SET PAGESIZE 1000
SET FEEDBACK OFF

PROMPT
PROMPT ========================================
PROMPT Package Version Verification
PROMPT ========================================
PROMPT

-- Column formats for the report below
COLUMN PACKAGE_OWNER FORMAT A15
COLUMN PACKAGE_NAME FORMAT A20
COLUMN VERSION FORMAT A10
COLUMN STATUS FORMAT A80

-- For each tracked package, take the most recent tracking row (RN = 1 by
-- TRACKING_DATE descending) and ask ENV_MANAGER whether the package code
-- has changed since that row was recorded.
SELECT
    PACKAGE_OWNER,
    PACKAGE_NAME,
    PACKAGE_VERSION AS VERSION,
    CT_MRDS.ENV_MANAGER.CHECK_PACKAGE_CHANGES(PACKAGE_OWNER, PACKAGE_NAME) AS STATUS
FROM (
    SELECT
        PACKAGE_OWNER,
        PACKAGE_NAME,
        PACKAGE_VERSION,
        ROW_NUMBER() OVER (PARTITION BY PACKAGE_OWNER, PACKAGE_NAME ORDER BY TRACKING_DATE DESC) AS RN
    FROM CT_MRDS.A_PACKAGE_VERSION_TRACKING
)
WHERE RN = 1
ORDER BY PACKAGE_OWNER, PACKAGE_NAME;

PROMPT
PROMPT ========================================
PROMPT Verification Complete
PROMPT ========================================
PROMPT
PROMPT Legend:
PROMPT OK - Package has not changed since last tracking
PROMPT WARNING - Package code changed without version update
PROMPT
PROMPT For detailed hash information, use:
PROMPT SELECT ENV_MANAGER.GET_PACKAGE_HASH_INFO('OWNER', 'PACKAGE') FROM DUAL;
PROMPT ========================================

SET FEEDBACK ON
|
||||
5
MARS_Packages/REL01_POST_DEACTIVATION/MARS-835/.gitignore
vendored
Normal file
5
MARS_Packages/REL01_POST_DEACTIVATION/MARS-835/.gitignore
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
# Exclude temporary folders from version control
|
||||
confluence/
|
||||
log/
|
||||
test/
|
||||
mock_data/
|
||||
@@ -0,0 +1,176 @@
|
||||
-- =====================================================================================
|
||||
-- Script: 00_MARS_835_pre_check_existing_files.sql
|
||||
-- Purpose: Display existing archive files in DATA and HIST buckets before export
|
||||
-- Author: Grzegorz Michalski
|
||||
-- Created: 2025-12-17
|
||||
-- MARS Issue: MARS-835
|
||||
-- Target Locations: mrds_data_dev/ODS/CSDB/, mrds_hist_dev/ARCHIVE/CSDB/
|
||||
-- =====================================================================================
|
||||
|
||||
SET SERVEROUTPUT ON SIZE UNLIMITED;
|
||||
SET FEEDBACK ON;
|
||||
SET VERIFY OFF;
|
||||
SET LINESIZE 200;
|
||||
|
||||
PROMPT =====================================================================================
|
||||
PROMPT MARS-835 Pre-Check: Listing existing CSDB files in DATA and HIST buckets
|
||||
PROMPT =====================================================================================
|
||||
|
||||
-- Lists existing CSDB export files in the DATA bucket (CSV) and the HIST
-- bucket (Parquet) so the operator can judge whether the export environment
-- is clean before running MARS-835 exports. A missing folder (ORA-20404)
-- is treated as a clean folder, not an error.
DECLARE
    vDataBucketUri  VARCHAR2(500);
    vHistBucketUri  VARCHAR2(500);
    vCredentialName VARCHAR2(100);
    vTotalFiles     NUMBER := 0;   -- files seen across all folders (HIST folders count at most 5 each)

    TYPE t_folder_list IS TABLE OF VARCHAR2(200);
    vDataFolders t_folder_list;
    vHistFolders t_folder_list;

    -- Lists files matching pPattern in one bucket folder and prints them.
    -- pLimit (when not NULL) caps the number of files shown/counted, matching
    -- the previous FETCH FIRST behavior for HIST folders.
    PROCEDURE list_folder_files(
        pBucketUri IN VARCHAR2,
        pFolder    IN VARCHAR2,
        pPattern   IN VARCHAR2,          -- e.g. '%.csv' or '%.parquet'
        pLabel     IN VARCHAR2,          -- 'CSV' or 'Parquet' (used in messages)
        pLimit     IN NUMBER DEFAULT NULL
    ) IS
        vFileCount NUMBER := 0;
    BEGIN
        DBMS_OUTPUT.PUT_LINE('');
        DBMS_OUTPUT.PUT_LINE('Folder: ' || pFolder);
        DBMS_OUTPUT.PUT_LINE('-------------------------------------------------------------------------------------');

        BEGIN
            FOR rec IN (
                SELECT object_name, bytes, TO_CHAR(created, 'YYYY-MM-DD HH24:MI:SS') AS created_date
                FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
                    credential_name => vCredentialName,
                    location_uri    => pBucketUri || pFolder
                ))
                WHERE object_name LIKE pPattern
                ORDER BY created DESC
            ) LOOP
                vFileCount  := vFileCount + 1;
                vTotalFiles := vTotalFiles + 1;
                DBMS_OUTPUT.PUT_LINE(' [' || vFileCount || '] ' || rec.object_name ||
                    ' (' || ROUND(rec.bytes/1024/1024, 2) || ' MB) - ' || rec.created_date);
                EXIT WHEN pLimit IS NOT NULL AND vFileCount >= pLimit;
            END LOOP;

            IF vFileCount = 0 THEN
                DBMS_OUTPUT.PUT_LINE(' No ' || pLabel || ' files found (OK - clean folder)');
            ELSIF pLimit IS NULL THEN
                DBMS_OUTPUT.PUT_LINE(' >> WARNING: Found ' || vFileCount || ' existing ' || pLabel || ' files');
            ELSE
                DBMS_OUTPUT.PUT_LINE(' >> WARNING: Found ' || pLabel || ' files (showing first ' || pLimit || ')');
                DBMS_OUTPUT.PUT_LINE(' >> Total files in folder: Check bucket manually for complete count');
            END IF;
        EXCEPTION
            WHEN OTHERS THEN
                IF SQLCODE = -20404 THEN
                    DBMS_OUTPUT.PUT_LINE(' Folder does not exist (OK - will be created during export)');
                ELSE
                    DBMS_OUTPUT.PUT_LINE(' ERROR checking folder: ' || SQLERRM);
                END IF;
        END;
    END list_folder_files;
BEGIN
    -- Get bucket URIs and credential from FILE_MANAGER configuration
    vDataBucketUri  := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('DATA');
    vHistBucketUri  := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
    vCredentialName := CT_MRDS.ENV_MANAGER.gvCredentialName;

    DBMS_OUTPUT.PUT_LINE('CHECK TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
    DBMS_OUTPUT.PUT_LINE('DATA Bucket URI: ' || vDataBucketUri);
    DBMS_OUTPUT.PUT_LINE('HIST Bucket URI: ' || vHistBucketUri);
    DBMS_OUTPUT.PUT_LINE('Credential: ' || vCredentialName);
    DBMS_OUTPUT.PUT_LINE('');

    -- DATA bucket folders (CSV format - only DEBT and DEBT_DAILY)
    vDataFolders := t_folder_list(
        'ODS/CSDB/CSDB_DEBT/',
        'ODS/CSDB/CSDB_DEBT_DAILY/'
    );

    -- HIST bucket folders (Parquet format - all 6 tables)
    vHistFolders := t_folder_list(
        'ARCHIVE/CSDB/CSDB_DEBT/',
        'ARCHIVE/CSDB/CSDB_DEBT_DAILY/',
        'ARCHIVE/CSDB/CSDB_INSTR_RAT_FULL/',
        'ARCHIVE/CSDB/CSDB_INSTR_DESC_FULL/',
        'ARCHIVE/CSDB/CSDB_ISSUER_RAT_FULL/',
        'ARCHIVE/CSDB/CSDB_ISSUER_DESC_FULL/'
    );

    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    DBMS_OUTPUT.PUT_LINE('Checking DATA Bucket (CSV format - last 6 months)');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');

    FOR i IN 1..vDataFolders.COUNT LOOP
        list_folder_files(vDataBucketUri, vDataFolders(i), '%.csv', 'CSV');
    END LOOP;

    DBMS_OUTPUT.PUT_LINE('');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    DBMS_OUTPUT.PUT_LINE('Checking HIST Bucket (Parquet format with Hive partitioning)');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');

    FOR i IN 1..vHistFolders.COUNT LOOP
        list_folder_files(vHistBucketUri, vHistFolders(i), '%.parquet', 'Parquet', 5);
    END LOOP;

    DBMS_OUTPUT.PUT_LINE('');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    DBMS_OUTPUT.PUT_LINE('Pre-Check Summary');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    DBMS_OUTPUT.PUT_LINE('Total files found: ' || vTotalFiles);

    IF vTotalFiles > 0 THEN
        DBMS_OUTPUT.PUT_LINE('');
        DBMS_OUTPUT.PUT_LINE('INFO: Existing files detected in buckets');
        DBMS_OUTPUT.PUT_LINE(' Export will ADD new files to existing ones.');
        DBMS_OUTPUT.PUT_LINE(' NOTE: Rollback will NOT delete pre-existing files.');
        DBMS_OUTPUT.PUT_LINE(' Record count verification may show MISMATCH due to pre-existing data.');
    ELSE
        DBMS_OUTPUT.PUT_LINE('');
        DBMS_OUTPUT.PUT_LINE('OK: No existing files found - clean export environment');
    END IF;

    DBMS_OUTPUT.PUT_LINE('=====================================================================================');

EXCEPTION
    WHEN OTHERS THEN
        DBMS_OUTPUT.PUT_LINE('ERROR during pre-check: ' || SQLERRM);
        DBMS_OUTPUT.PUT_LINE('SQLCODE: ' || SQLCODE);
        RAISE;
END;
/
|
||||
|
||||
PROMPT
|
||||
PROMPT Pre-check completed
|
||||
PROMPT
|
||||
@@ -0,0 +1,257 @@
|
||||
--=============================================================================================================================
|
||||
-- MARS-835: Export Group 1 - Split DATA + HIST (DEBT, DEBT_DAILY)
|
||||
--=============================================================================================================================
|
||||
-- Purpose: Export last 6 months to DATA bucket (CSV), older data to HIST bucket (Parquet)
|
||||
-- Applies column mapping: A_ETL_LOAD_SET_FK to A_WORKFLOW_HISTORY_KEY
|
||||
-- Excludes legacy columns not required in new structure
|
||||
-- USES: DATA_EXPORTER v2.4.0 with pTemplateTableName for column order and date formats
|
||||
-- Author: Grzegorz Michalski
|
||||
-- Date: 2025-12-17
|
||||
-- Updated: 2026-01-11 (Updated to DATA_EXPORTER v2.4.0 with pTemplateTableName)
|
||||
-- Related: MARS-835 - CSDB Data Export
|
||||
--=============================================================================================================================
|
||||
|
||||
SET SERVEROUTPUT ON SIZE UNLIMITED
|
||||
SET TIMING ON
|
||||
|
||||
DEFINE cutoff_date = "ADD_MONTHS(SYSDATE, -6)"
|
||||
|
||||
PROMPT ========================================================================
|
||||
PROMPT Exporting CSDB.DEBT - Split DATA + HIST
|
||||
PROMPT ========================================================================
|
||||
PROMPT Last 6 months to DATA bucket (CSV format)
|
||||
PROMPT Older data to HIST bucket (Parquet with partitioning)
|
||||
PROMPT Column mapping: A_ETL_LOAD_SET_FK to A_WORKFLOW_HISTORY_KEY
|
||||
PROMPT Excluded columns: IDIRDEPOSITORY, VA_BONDDURATION
|
||||
PROMPT ========================================================================
|
||||
|
||||
-- PRE-EXPORT CHECK: List existing files and count records
|
||||
-- PRE-EXPORT CHECK for CSDB_DEBT: list existing files in the DATA bucket and
-- count records readable via the external table. A missing folder (ORA-20404)
-- is now treated as a clean bucket instead of aborting the whole script
-- (consistent with 00_MARS_835_pre_check_existing_files.sql).
DECLARE
    vFileCount   NUMBER := 0;
    vRecordCount NUMBER := 0;
    vLocationUri VARCHAR2(1000);
BEGIN
    -- Get bucket URI for DATA bucket
    vLocationUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('DATA') || 'ODS/CSDB/CSDB_DEBT/';

    -- Count existing files; a non-existent folder counts as zero files.
    -- NOTE(review): credential is hardcoded here while other scripts use
    -- CT_MRDS.ENV_MANAGER.gvCredentialName - confirm this is intentional.
    BEGIN
        SELECT COUNT(*)
        INTO vFileCount
        FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
            credential_name => 'OCI$RESOURCE_PRINCIPAL',
            location_uri    => vLocationUri
        ))
        WHERE object_name NOT LIKE '%/';  -- Exclude directories
    EXCEPTION
        WHEN OTHERS THEN
            IF SQLCODE = -20404 THEN
                vFileCount := 0;  -- folder does not exist yet: clean bucket
            ELSE
                RAISE;
            END IF;
    END;

    IF vFileCount > 0 THEN
        DBMS_OUTPUT.PUT_LINE('===============================================================================');
        DBMS_OUTPUT.PUT_LINE('PRE-EXPORT CHECK: Files already exist in DATA bucket');
        DBMS_OUTPUT.PUT_LINE('===============================================================================');
        DBMS_OUTPUT.PUT_LINE('Location: ' || vLocationUri);
        DBMS_OUTPUT.PUT_LINE('Files found: ' || vFileCount);
        DBMS_OUTPUT.PUT_LINE('');

        -- List existing files
        DBMS_OUTPUT.PUT_LINE('Existing files:');
        FOR rec IN (
            SELECT object_name, bytes, TO_CHAR(last_modified, 'YYYY-MM-DD HH24:MI:SS') AS modified
            FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
                credential_name => 'OCI$RESOURCE_PRINCIPAL',
                location_uri    => vLocationUri
            ))
            WHERE object_name NOT LIKE '%/'
            ORDER BY object_name
        ) LOOP
            DBMS_OUTPUT.PUT_LINE(' - ' || rec.object_name || ' (' || rec.bytes || ' bytes, ' || rec.modified || ')');
        END LOOP;

        -- Count records in external table (best-effort; failure only warns)
        BEGIN
            EXECUTE IMMEDIATE 'SELECT COUNT(*) FROM ODS.CSDB_DEBT_ODS' INTO vRecordCount;
            DBMS_OUTPUT.PUT_LINE('');
            DBMS_OUTPUT.PUT_LINE('Records currently readable via external table: ' || vRecordCount);
        EXCEPTION
            WHEN OTHERS THEN
                DBMS_OUTPUT.PUT_LINE('');
                DBMS_OUTPUT.PUT_LINE('WARNING: Cannot count records in external table');
                DBMS_OUTPUT.PUT_LINE('Error: ' || SQLERRM);
        END;

        DBMS_OUTPUT.PUT_LINE('===============================================================================');
        DBMS_OUTPUT.PUT_LINE('');
    ELSE
        DBMS_OUTPUT.PUT_LINE('PRE-EXPORT CHECK: No existing files found in DATA bucket - bucket is clean');
        DBMS_OUTPUT.PUT_LINE('');
    END IF;
END;
/
|
||||
|
||||
-- Export recent data to DATA bucket (CSV)
|
||||
-- NEW v2.4.0: Per-column date format handling with template table for column order
|
||||
BEGIN
    -- Export the last 6 months of OU_CSDB.LEGACY_DEBT as CSV files to the
    -- DATA bucket, one file per year/month, using the template table for
    -- column order and per-column date formats (DATA_EXPORTER v2.4.0).
    DBMS_OUTPUT.PUT_LINE('Exporting LEGACY_DEBT data to DATA bucket (last 6 months)...');
    DBMS_OUTPUT.PUT_LINE('Using Template Table: CT_ET_TEMPLATES.CSDB_DEBT');

    -- NOTE(review): &cutoff_date substitutes to ADD_MONTHS(SYSDATE, -6); it is
    -- re-evaluated in the HIST export block below at a slightly later time, so
    -- rows exactly at the 6-month boundary could be duplicated or skipped
    -- depending on the procedure's inclusive/exclusive date handling - confirm.
    CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
        pSchemaName        => 'OU_CSDB',
        pTableName         => 'LEGACY_DEBT',
        pKeyColumnName     => 'A_ETL_LOAD_SET_FK',
        pBucketArea        => 'DATA',
        pFolderName        => 'ODS/CSDB/CSDB_DEBT',
        pMinDate           => &cutoff_date,
        pMaxDate           => SYSDATE,
        pParallelDegree    => 8,
        pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_DEBT',
        pMaxFileSize       => 104857600 -- 100MB in bytes (safe for parallel execution, avoids ORA-04036)
    );

    DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_DEBT exported to DATA bucket with template column order');
END;
/
|
||||
|
||||
-- Export historical data to HIST bucket (Parquet)
|
||||
-- NEW v2.4.0: Per-column date format handling with template table
|
||||
BEGIN
    -- Export OU_CSDB.LEGACY_DEBT rows older than 6 months as Parquet files to
    -- the HIST (ARCHIVE) bucket; pMinDate is left at its default lower bound,
    -- so everything up to the cutoff is included.
    DBMS_OUTPUT.PUT_LINE('Exporting LEGACY_DEBT data to HIST bucket (older than 6 months)...');
    DBMS_OUTPUT.PUT_LINE('Using Template Table: CT_ET_TEMPLATES.CSDB_DEBT');

    -- NOTE(review): &cutoff_date (ADD_MONTHS(SYSDATE, -6)) is evaluated here at
    -- a different moment than in the CSV export above; see boundary caveat there.
    CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
        pSchemaName        => 'OU_CSDB',
        pTableName         => 'LEGACY_DEBT',
        pKeyColumnName     => 'A_ETL_LOAD_SET_FK',
        pBucketArea        => 'ARCHIVE',
        pFolderName        => 'ARCHIVE/CSDB/CSDB_DEBT',
        pMaxDate           => &cutoff_date,
        pParallelDegree    => 8,
        pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_DEBT'
    );

    DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_DEBT exported to HIST bucket with template column order');
END;
/
|
||||
|
||||
PROMPT ========================================================================
|
||||
PROMPT Exporting CSDB.LEGACY_DEBT_DAILY - Split DATA + HIST
|
||||
PROMPT ========================================================================
|
||||
PROMPT Last 6 months to DATA bucket (CSV format)
|
||||
PROMPT Older data to HIST bucket (Parquet with partitioning)
|
||||
PROMPT Column mapping: A_ETL_LOAD_SET_FK to A_WORKFLOW_HISTORY_KEY
|
||||
PROMPT Excluded columns: STEPID, PROGRAMNAME, PROGRAMCEILING, PROGRAMSTATUS,
|
||||
PROMPT ISSUERNACE21SECTOR, INSTRUMENTQUOTATIONBASIS
|
||||
PROMPT ========================================================================
|
||||
|
||||
-- PRE-EXPORT CHECK: List existing files and count records
|
||||
-- PRE-EXPORT CHECK for CSDB_DEBT_DAILY: list existing files in the DATA bucket
-- and count records readable via the external table. A missing folder
-- (ORA-20404) is now treated as a clean bucket instead of aborting the whole
-- script (consistent with 00_MARS_835_pre_check_existing_files.sql).
DECLARE
    vFileCount   NUMBER := 0;
    vRecordCount NUMBER := 0;
    vLocationUri VARCHAR2(1000);
BEGIN
    -- Get bucket URI for DATA bucket
    vLocationUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('DATA') || 'ODS/CSDB/CSDB_DEBT_DAILY/';

    -- Count existing files; a non-existent folder counts as zero files.
    -- NOTE(review): credential is hardcoded here while other scripts use
    -- CT_MRDS.ENV_MANAGER.gvCredentialName - confirm this is intentional.
    BEGIN
        SELECT COUNT(*)
        INTO vFileCount
        FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
            credential_name => 'OCI$RESOURCE_PRINCIPAL',
            location_uri    => vLocationUri
        ))
        WHERE object_name NOT LIKE '%/';  -- Exclude directories
    EXCEPTION
        WHEN OTHERS THEN
            IF SQLCODE = -20404 THEN
                vFileCount := 0;  -- folder does not exist yet: clean bucket
            ELSE
                RAISE;
            END IF;
    END;

    IF vFileCount > 0 THEN
        DBMS_OUTPUT.PUT_LINE('===============================================================================');
        DBMS_OUTPUT.PUT_LINE('PRE-EXPORT CHECK: Files already exist in DATA bucket');
        DBMS_OUTPUT.PUT_LINE('===============================================================================');
        DBMS_OUTPUT.PUT_LINE('Location: ' || vLocationUri);
        DBMS_OUTPUT.PUT_LINE('Files found: ' || vFileCount);
        DBMS_OUTPUT.PUT_LINE('');

        -- List existing files
        DBMS_OUTPUT.PUT_LINE('Existing files:');
        FOR rec IN (
            SELECT object_name, bytes, TO_CHAR(last_modified, 'YYYY-MM-DD HH24:MI:SS') AS modified
            FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
                credential_name => 'OCI$RESOURCE_PRINCIPAL',
                location_uri    => vLocationUri
            ))
            WHERE object_name NOT LIKE '%/'
            ORDER BY object_name
        ) LOOP
            DBMS_OUTPUT.PUT_LINE(' - ' || rec.object_name || ' (' || rec.bytes || ' bytes, ' || rec.modified || ')');
        END LOOP;

        -- Count records in external table (best-effort; failure only warns)
        BEGIN
            EXECUTE IMMEDIATE 'SELECT COUNT(*) FROM ODS.CSDB_DEBT_DAILY_ODS' INTO vRecordCount;
            DBMS_OUTPUT.PUT_LINE('');
            DBMS_OUTPUT.PUT_LINE('Records currently readable via external table: ' || vRecordCount);
        EXCEPTION
            WHEN OTHERS THEN
                DBMS_OUTPUT.PUT_LINE('');
                DBMS_OUTPUT.PUT_LINE('WARNING: Cannot count records in external table');
                DBMS_OUTPUT.PUT_LINE('Error: ' || SQLERRM);
        END;

        DBMS_OUTPUT.PUT_LINE('===============================================================================');
        DBMS_OUTPUT.PUT_LINE('');
    ELSE
        DBMS_OUTPUT.PUT_LINE('PRE-EXPORT CHECK: No existing files found in DATA bucket - bucket is clean');
        DBMS_OUTPUT.PUT_LINE('');
    END IF;
END;
/
|
||||
|
||||
-- Export recent data to DATA bucket (CSV)
|
||||
-- NEW v2.4.0: Per-column date format handling with template table for column order
|
||||
BEGIN
    -- Export the last 6 months of OU_CSDB.LEGACY_DEBT_DAILY as CSV files to
    -- the DATA bucket, one file per year/month, using the template table for
    -- column order and per-column date formats (DATA_EXPORTER v2.4.0).
    DBMS_OUTPUT.PUT_LINE('Exporting LEGACY_DEBT_DAILY data to DATA bucket (last 6 months)...');
    DBMS_OUTPUT.PUT_LINE('Using Template Table: CT_ET_TEMPLATES.CSDB_DEBT_DAILY');

    -- NOTE(review): &cutoff_date substitutes to ADD_MONTHS(SYSDATE, -6); it is
    -- re-evaluated in the HIST export block below at a slightly later time, so
    -- rows exactly at the 6-month boundary could be duplicated or skipped
    -- depending on the procedure's inclusive/exclusive date handling - confirm.
    CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
        pSchemaName        => 'OU_CSDB',
        pTableName         => 'LEGACY_DEBT_DAILY',
        pKeyColumnName     => 'A_ETL_LOAD_SET_FK',
        pBucketArea        => 'DATA',
        pFolderName        => 'ODS/CSDB/CSDB_DEBT_DAILY',
        pMinDate           => &cutoff_date,
        pMaxDate           => SYSDATE,
        pParallelDegree    => 8,
        pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_DEBT_DAILY',
        pMaxFileSize       => 104857600 -- 100MB in bytes (safe for parallel execution, avoids ORA-04036)
    );

    DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_DEBT_DAILY exported to DATA bucket with template column order');
END;
/
|
||||
|
||||
-- Export historical data to HIST bucket (Parquet)
|
||||
-- NEW v2.4.0: Per-column date format handling with template table
|
||||
BEGIN
    -- Export OU_CSDB.LEGACY_DEBT_DAILY rows older than 6 months as Parquet
    -- files to the HIST (ARCHIVE) bucket; pMinDate is left at its default
    -- lower bound, so everything up to the cutoff is included.
    DBMS_OUTPUT.PUT_LINE('Exporting LEGACY_DEBT_DAILY data to HIST bucket (older than 6 months)...');
    DBMS_OUTPUT.PUT_LINE('Using Template Table: CT_ET_TEMPLATES.CSDB_DEBT_DAILY');

    -- NOTE(review): &cutoff_date (ADD_MONTHS(SYSDATE, -6)) is evaluated here at
    -- a different moment than in the CSV export above; see boundary caveat there.
    CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
        pSchemaName        => 'OU_CSDB',
        pTableName         => 'LEGACY_DEBT_DAILY',
        pKeyColumnName     => 'A_ETL_LOAD_SET_FK',
        pBucketArea        => 'ARCHIVE',
        pFolderName        => 'ARCHIVE/CSDB/CSDB_DEBT_DAILY',
        pMaxDate           => &cutoff_date,
        pParallelDegree    => 8,
        pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_DEBT_DAILY'
    );

    DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_DEBT_DAILY exported to HIST bucket with template column order');
END;
/
|
||||
|
||||
PROMPT ========================================================================
|
||||
PROMPT Group 1 Export Completed
|
||||
PROMPT ========================================================================
|
||||
PROMPT - LEGACY_DEBT: DATA + HIST exported
|
||||
PROMPT - LEGACY_DEBT_DAILY: DATA + HIST exported
|
||||
PROMPT ========================================================================
|
||||
|
||||
--=============================================================================================================================
|
||||
-- End of Script
|
||||
--=============================================================================================================================
|
||||
@@ -0,0 +1,137 @@
|
||||
--=============================================================================================================================
|
||||
-- MARS-835: Export Group 2 - HIST Only (4 tables)
|
||||
--=============================================================================================================================
|
||||
-- Purpose: Export all data to HIST bucket (Parquet with Hive partitioning)
|
||||
-- Tables: INSTR_RAT_FULL, INSTR_DESC_FULL, ISSUER_RAT_FULL, ISSUER_DESC_FULL
|
||||
-- Applies column mapping: A_ETL_LOAD_SET_FK to A_WORKFLOW_HISTORY_KEY
|
||||
-- No column exclusions for these tables
|
||||
-- USES: DATA_EXPORTER v2.4.0 with per-column date format handling
|
||||
-- Author: Grzegorz Michalski
|
||||
-- Date: 2025-12-17
|
||||
-- Updated: 2026-01-11 (Updated to DATA_EXPORTER v2.4.0 with pTemplateTableName)
|
||||
-- Related: MARS-835 - CSDB Data Export
|
||||
--=============================================================================================================================
|
||||
|
||||
SET SERVEROUTPUT ON SIZE UNLIMITED
|
||||
SET TIMING ON
|
||||
|
||||
PROMPT ========================================================================
|
||||
PROMPT Exporting CSDB.LEGACY_INSTR_RAT_FULL - HIST Only
|
||||
PROMPT ========================================================================
|
||||
PROMPT All data to HIST bucket (Parquet with partitioning)
|
||||
PROMPT Column mapping: A_ETL_LOAD_SET_FK to A_WORKFLOW_HISTORY_KEY
|
||||
PROMPT ========================================================================
|
||||
|
||||
-- Full export of LEGACY_INSTR_RAT_FULL -> HIST (ARCHIVE) bucket as Parquet.
-- pMaxDate = SYSDATE, i.e. every row up to "now"; template table drives
-- column order and per-column date formats (DATA_EXPORTER v2.4.0).
BEGIN
    DBMS_OUTPUT.PUT_LINE('Exporting LEGACY_INSTR_RAT_FULL data to HIST bucket...');
    DBMS_OUTPUT.PUT_LINE('Using Template Table: CT_ET_TEMPLATES.CSDB_INSTR_RAT_FULL');

    -- Named notation: argument order below is not significant in PL/SQL.
    CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
        pSchemaName        => 'OU_CSDB',
        pTableName         => 'LEGACY_INSTR_RAT_FULL',
        pKeyColumnName     => 'A_ETL_LOAD_SET_FK',
        pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_INSTR_RAT_FULL',
        pBucketArea        => 'ARCHIVE',
        pFolderName        => 'ARCHIVE/CSDB/CSDB_INSTR_RAT_FULL',
        pMaxDate           => SYSDATE,
        pParallelDegree    => 8
    );

    DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_INSTR_RAT_FULL exported to HIST bucket with template column order');
END;
/
|
||||
|
||||
PROMPT ========================================================================
|
||||
PROMPT Exporting CSDB.LEGACY_INSTR_DESC_FULL - HIST Only
|
||||
PROMPT ========================================================================
|
||||
PROMPT All data to HIST bucket (Parquet with partitioning)
|
||||
PROMPT Column mapping: A_ETL_LOAD_SET_FK to A_WORKFLOW_HISTORY_KEY
|
||||
PROMPT ========================================================================
|
||||
|
||||
-- Full export of LEGACY_INSTR_DESC_FULL -> HIST (ARCHIVE) bucket as Parquet.
-- pMaxDate = SYSDATE, i.e. every row up to "now"; template table drives
-- column order and per-column date formats (DATA_EXPORTER v2.4.0).
BEGIN
    DBMS_OUTPUT.PUT_LINE('Exporting LEGACY_INSTR_DESC_FULL data to HIST bucket...');
    DBMS_OUTPUT.PUT_LINE('Using Template Table: CT_ET_TEMPLATES.CSDB_INSTR_DESC_FULL');

    -- Named notation: argument order below is not significant in PL/SQL.
    CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
        pSchemaName        => 'OU_CSDB',
        pTableName         => 'LEGACY_INSTR_DESC_FULL',
        pKeyColumnName     => 'A_ETL_LOAD_SET_FK',
        pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_INSTR_DESC_FULL',
        pBucketArea        => 'ARCHIVE',
        pFolderName        => 'ARCHIVE/CSDB/CSDB_INSTR_DESC_FULL',
        pMaxDate           => SYSDATE,
        pParallelDegree    => 8
    );

    DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_INSTR_DESC_FULL exported to HIST bucket with template column order');
END;
/
|
||||
|
||||
PROMPT ========================================================================
|
||||
PROMPT Exporting CSDB.LEGACY_ISSUER_RAT_FULL - HIST Only
|
||||
PROMPT ========================================================================
|
||||
PROMPT All data to HIST bucket (Parquet with partitioning)
|
||||
PROMPT Column mapping: A_ETL_LOAD_SET_FK to A_WORKFLOW_HISTORY_KEY
|
||||
PROMPT ========================================================================
|
||||
|
||||
-- Full export of LEGACY_ISSUER_RAT_FULL -> HIST (ARCHIVE) bucket as Parquet.
-- pMaxDate = SYSDATE, i.e. every row up to "now"; template table drives
-- column order and per-column date formats (DATA_EXPORTER v2.4.0).
BEGIN
    DBMS_OUTPUT.PUT_LINE('Exporting LEGACY_ISSUER_RAT_FULL data to HIST bucket...');
    DBMS_OUTPUT.PUT_LINE('Using Template Table: CT_ET_TEMPLATES.CSDB_ISSUER_RAT_FULL');

    -- Named notation: argument order below is not significant in PL/SQL.
    CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
        pSchemaName        => 'OU_CSDB',
        pTableName         => 'LEGACY_ISSUER_RAT_FULL',
        pKeyColumnName     => 'A_ETL_LOAD_SET_FK',
        pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_ISSUER_RAT_FULL',
        pBucketArea        => 'ARCHIVE',
        pFolderName        => 'ARCHIVE/CSDB/CSDB_ISSUER_RAT_FULL',
        pMaxDate           => SYSDATE,
        pParallelDegree    => 8
    );

    DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_ISSUER_RAT_FULL exported to HIST bucket with template column order');
END;
/
|
||||
|
||||
PROMPT ========================================================================
|
||||
PROMPT Exporting CSDB.LEGACY_ISSUER_DESC_FULL - HIST Only
|
||||
PROMPT ========================================================================
|
||||
PROMPT All data to HIST bucket (Parquet with partitioning)
|
||||
PROMPT Column mapping: A_ETL_LOAD_SET_FK to A_WORKFLOW_HISTORY_KEY
|
||||
PROMPT ========================================================================
|
||||
|
||||
-- Full export of LEGACY_ISSUER_DESC_FULL -> HIST (ARCHIVE) bucket as Parquet.
-- pMaxDate = SYSDATE, i.e. every row up to "now"; template table drives
-- column order and per-column date formats (DATA_EXPORTER v2.4.0).
BEGIN
    DBMS_OUTPUT.PUT_LINE('Exporting LEGACY_ISSUER_DESC_FULL data to HIST bucket...');
    DBMS_OUTPUT.PUT_LINE('Using Template Table: CT_ET_TEMPLATES.CSDB_ISSUER_DESC_FULL');

    -- Named notation: argument order below is not significant in PL/SQL.
    CT_MRDS.DATA_EXPORTER.EXPORT_TABLE_DATA_BY_DATE(
        pSchemaName        => 'OU_CSDB',
        pTableName         => 'LEGACY_ISSUER_DESC_FULL',
        pKeyColumnName     => 'A_ETL_LOAD_SET_FK',
        pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_ISSUER_DESC_FULL',
        pBucketArea        => 'ARCHIVE',
        pFolderName        => 'ARCHIVE/CSDB/CSDB_ISSUER_DESC_FULL',
        pMaxDate           => SYSDATE,
        pParallelDegree    => 8
    );

    DBMS_OUTPUT.PUT_LINE('SUCCESS: LEGACY_ISSUER_DESC_FULL exported to HIST bucket with template column order');
END;
/
|
||||
|
||||
PROMPT ========================================================================
|
||||
PROMPT Group 2 Export Completed
|
||||
PROMPT ========================================================================
|
||||
PROMPT - LEGACY_INSTR_RAT_FULL: HIST exported
|
||||
PROMPT - LEGACY_INSTR_DESC_FULL: HIST exported
|
||||
PROMPT - LEGACY_ISSUER_RAT_FULL: HIST exported
|
||||
PROMPT - LEGACY_ISSUER_DESC_FULL: HIST exported
|
||||
PROMPT ========================================================================
|
||||
|
||||
--=============================================================================================================================
|
||||
-- End of Script
|
||||
--=============================================================================================================================
|
||||
@@ -0,0 +1,193 @@
|
||||
-- =====================================================================================
|
||||
-- Script: 03_MARS_835_verify_exports.sql
|
||||
-- Purpose: Verify exported files exist in DATA and HIST buckets after export
|
||||
-- Author: Grzegorz Michalski
|
||||
-- Created: 2025-12-17
|
||||
-- MARS Issue: MARS-835
|
||||
-- Target Locations: mrds_data_dev/ODS/CSDB/, mrds_hist_dev/ARCHIVE/CSDB/
|
||||
-- =====================================================================================
|
||||
|
||||
SET SERVEROUTPUT ON SIZE UNLIMITED;
|
||||
SET FEEDBACK ON;
|
||||
SET VERIFY OFF;
|
||||
SET LINESIZE 200;
|
||||
|
||||
PROMPT =====================================================================================
|
||||
PROMPT MARS-835 Verification: Listing exported files in DATA and HIST buckets
|
||||
PROMPT =====================================================================================
|
||||
|
||||
-- Lists exported files in the DATA (CSV) and HIST (Parquet) buckets and prints
-- a per-folder inventory plus an overall SUCCESS / PARTIAL / FAILED verdict.
-- Read-only: only DBMS_CLOUD.LIST_OBJECTS is called; nothing is modified.
DECLARE
    -- Bucket base URIs and credential, resolved per environment at runtime.
    vDataBucketUri  VARCHAR2(500);
    vHistBucketUri  VARCHAR2(500);
    vCredentialName VARCHAR2(100);
    -- Per-folder file counter (reset for each folder).
    vFileCount      NUMBER := 0;
    -- Running totals across all folders. NOTE: HIST totals are a SAMPLE only,
    -- because the HIST listing below is capped with FETCH FIRST 10 ROWS ONLY.
    vTotalDataFiles NUMBER := 0;
    vTotalHistFiles NUMBER := 0;
    vTotalDataSize  NUMBER := 0;
    vTotalHistSize  NUMBER := 0;

    -- One entry per exported folder: object-store prefix, display label,
    -- and the file format expected in that folder.
    TYPE t_folder_info IS RECORD (
        folder_name     VARCHAR2(200),
        table_name      VARCHAR2(100),
        expected_format VARCHAR2(20)
    );
    TYPE t_folder_list IS TABLE OF t_folder_info;

    vDataFolders t_folder_list;
    vHistFolders t_folder_list;
BEGIN
    -- Get bucket URIs and credential from FILE_MANAGER
    vDataBucketUri  := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('DATA');
    vHistBucketUri  := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
    vCredentialName := CT_MRDS.ENV_MANAGER.gvCredentialName;

    DBMS_OUTPUT.PUT_LINE('VERIFICATION TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
    DBMS_OUTPUT.PUT_LINE('DATA Bucket URI: ' || vDataBucketUri);
    DBMS_OUTPUT.PUT_LINE('HIST Bucket URI: ' || vHistBucketUri);
    DBMS_OUTPUT.PUT_LINE('');

    -- Initialize folder lists
    -- DATA bucket: only the two split-export (last-6-months) tables.
    vDataFolders := t_folder_list(
        t_folder_info('ODS/CSDB/CSDB_DEBT/', 'DEBT', 'CSV'),
        t_folder_info('ODS/CSDB/CSDB_DEBT_DAILY/', 'DEBT_DAILY', 'CSV')
    );

    -- HIST bucket: all six exported tables.
    vHistFolders := t_folder_list(
        t_folder_info('ARCHIVE/CSDB/CSDB_DEBT/', 'DEBT', 'Parquet'),
        t_folder_info('ARCHIVE/CSDB/CSDB_DEBT_DAILY/', 'DEBT_DAILY', 'Parquet'),
        t_folder_info('ARCHIVE/CSDB/CSDB_INSTR_RAT_FULL/', 'INSTR_RAT_FULL', 'Parquet'),
        t_folder_info('ARCHIVE/CSDB/CSDB_INSTR_DESC_FULL/', 'INSTR_DESC_FULL', 'Parquet'),
        t_folder_info('ARCHIVE/CSDB/CSDB_ISSUER_RAT_FULL/', 'ISSUER_RAT_FULL', 'Parquet'),
        t_folder_info('ARCHIVE/CSDB/CSDB_ISSUER_DESC_FULL/', 'ISSUER_DESC_FULL', 'Parquet')
    );

    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    DBMS_OUTPUT.PUT_LINE('Checking DATA Bucket Exports (CSV format - last 6 months)');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');

    -- Check DATA bucket exports
    FOR i IN 1..vDataFolders.COUNT LOOP
        vFileCount := 0;

        DBMS_OUTPUT.PUT_LINE('');
        DBMS_OUTPUT.PUT_LINE('Table: ' || vDataFolders(i).table_name || ' (' || vDataFolders(i).expected_format || ')');
        DBMS_OUTPUT.PUT_LINE('Folder: ' || vDataFolders(i).folder_name);
        DBMS_OUTPUT.PUT_LINE('-------------------------------------------------------------------------------------');

        BEGIN
            -- List every CSV object under the folder prefix, newest first (uncapped).
            FOR rec IN (
                SELECT object_name, bytes, TO_CHAR(created, 'YYYY-MM-DD HH24:MI:SS') AS created_date
                FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
                    credential_name => vCredentialName,
                    location_uri => vDataBucketUri || vDataFolders(i).folder_name
                ))
                WHERE object_name LIKE '%.csv'
                ORDER BY created DESC
            ) LOOP
                vFileCount := vFileCount + 1;
                vTotalDataFiles := vTotalDataFiles + 1;
                vTotalDataSize := vTotalDataSize + rec.bytes;
                DBMS_OUTPUT.PUT_LINE(' [' || vFileCount || '] ' || rec.object_name ||
                    ' (' || ROUND(rec.bytes/1024/1024, 2) || ' MB) - ' || rec.created_date);
            END LOOP;

            IF vFileCount = 0 THEN
                DBMS_OUTPUT.PUT_LINE(' [ERROR] No CSV files found - Export may have failed!');
            ELSE
                DBMS_OUTPUT.PUT_LINE(' [SUCCESS] Found ' || vFileCount || ' CSV file(s)');
            END IF;
        EXCEPTION
            WHEN OTHERS THEN
                -- Listing can fail per-folder (bad prefix, credential, network);
                -- report it and continue with the remaining folders.
                DBMS_OUTPUT.PUT_LINE(' [ERROR] Cannot access folder - ' || SQLERRM);
        END;
    END LOOP;

    DBMS_OUTPUT.PUT_LINE('');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    DBMS_OUTPUT.PUT_LINE('Checking HIST Bucket Exports (Parquet with Hive partitioning)');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');

    -- Check HIST bucket exports
    FOR i IN 1..vHistFolders.COUNT LOOP
        vFileCount := 0;

        DBMS_OUTPUT.PUT_LINE('');
        DBMS_OUTPUT.PUT_LINE('Table: ' || vHistFolders(i).table_name || ' (' || vHistFolders(i).expected_format || ')');
        DBMS_OUTPUT.PUT_LINE('Folder: ' || vHistFolders(i).folder_name);
        DBMS_OUTPUT.PUT_LINE('-------------------------------------------------------------------------------------');

        BEGIN
            -- Listing capped at 10 objects per folder (existence check, not a
            -- full inventory) -- hence the "+" / "(sample)" wording in the summary.
            FOR rec IN (
                SELECT object_name, bytes, TO_CHAR(created, 'YYYY-MM-DD HH24:MI:SS') AS created_date
                FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
                    credential_name => vCredentialName,
                    location_uri => vHistBucketUri || vHistFolders(i).folder_name
                ))
                WHERE object_name LIKE '%.parquet'
                ORDER BY created DESC
                FETCH FIRST 10 ROWS ONLY
            ) LOOP
                vFileCount := vFileCount + 1;
                vTotalHistFiles := vTotalHistFiles + 1;
                vTotalHistSize := vTotalHistSize + rec.bytes;

                -- Print only the first 5 names to keep the report readable.
                IF vFileCount <= 5 THEN
                    DBMS_OUTPUT.PUT_LINE(' [' || vFileCount || '] ' || rec.object_name ||
                        ' (' || ROUND(rec.bytes/1024/1024, 2) || ' MB) - ' || rec.created_date);
                END IF;
            END LOOP;

            IF vFileCount = 0 THEN
                DBMS_OUTPUT.PUT_LINE(' [ERROR] No Parquet files found - Export may have failed!');
            ELSIF vFileCount > 5 THEN
                DBMS_OUTPUT.PUT_LINE(' [SUCCESS] Found ' || vFileCount || '+ Parquet files (showing first 5)');
                DBMS_OUTPUT.PUT_LINE(' [INFO] Check Hive partitioning: PARTITION_YEAR=YYYY/PARTITION_MONTH=MM/');
            ELSE
                DBMS_OUTPUT.PUT_LINE(' [SUCCESS] Found ' || vFileCount || ' Parquet file(s)');
            END IF;
        EXCEPTION
            WHEN OTHERS THEN
                DBMS_OUTPUT.PUT_LINE(' [ERROR] Cannot access folder - ' || SQLERRM);
        END;
    END LOOP;

    DBMS_OUTPUT.PUT_LINE('');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    DBMS_OUTPUT.PUT_LINE('Export Verification Summary');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    DBMS_OUTPUT.PUT_LINE('DATA Bucket (CSV):');
    DBMS_OUTPUT.PUT_LINE(' - Total files: ' || vTotalDataFiles);
    DBMS_OUTPUT.PUT_LINE(' - Total size: ' || ROUND(vTotalDataSize/1024/1024/1024, 2) || ' GB');
    DBMS_OUTPUT.PUT_LINE(' - Expected tables: 2 (DEBT, DEBT_DAILY - last 6 months)');
    DBMS_OUTPUT.PUT_LINE('');
    DBMS_OUTPUT.PUT_LINE('HIST Bucket (Parquet):');
    DBMS_OUTPUT.PUT_LINE(' - Total files: ' || vTotalHistFiles || '+');
    DBMS_OUTPUT.PUT_LINE(' - Total size: ' || ROUND(vTotalHistSize/1024/1024/1024, 2) || '+ GB (sample)');
    DBMS_OUTPUT.PUT_LINE(' - Expected tables: 6 (all CSDB tables with historical data)');
    DBMS_OUTPUT.PUT_LINE('');

    -- Verdict: >=2 DATA files and >=6 HIST files (at least one per folder)
    -- counts as success; zero everywhere is a hard failure; anything else
    -- is treated as a partial export.
    IF vTotalDataFiles >= 2 AND vTotalHistFiles >= 6 THEN
        DBMS_OUTPUT.PUT_LINE('[SUCCESS] OVERALL STATUS: Export appears SUCCESSFUL');
        DBMS_OUTPUT.PUT_LINE(' Files found in both DATA and HIST buckets');
        DBMS_OUTPUT.PUT_LINE(' Proceed to record count verification (Step 4)');
    ELSIF vTotalDataFiles = 0 AND vTotalHistFiles = 0 THEN
        DBMS_OUTPUT.PUT_LINE('[FAILED] OVERALL STATUS: Export FAILED');
        DBMS_OUTPUT.PUT_LINE(' No files found in either bucket');
        DBMS_OUTPUT.PUT_LINE(' Review export logs for errors');
    ELSE
        DBMS_OUTPUT.PUT_LINE('[WARNING] OVERALL STATUS: Partial export detected');
        DBMS_OUTPUT.PUT_LINE(' Some files missing - review export logs');
    END IF;

    DBMS_OUTPUT.PUT_LINE('=====================================================================================');

EXCEPTION
    WHEN OTHERS THEN
        -- Unexpected failure outside the per-folder handlers: report and
        -- re-raise so the calling script sees a non-zero outcome.
        DBMS_OUTPUT.PUT_LINE('ERROR during verification: ' || SQLERRM);
        RAISE;
END;
/
|
||||
|
||||
PROMPT
|
||||
PROMPT Export verification completed
|
||||
PROMPT
|
||||
@@ -0,0 +1,201 @@
|
||||
-- =====================================================================================
|
||||
-- Script: 04_MARS_835_verify_record_counts.sql
|
||||
-- Purpose: Verify record counts match between source tables and exported data
|
||||
-- Author: Grzegorz Michalski
|
||||
-- Created: 2025-12-17
|
||||
-- MARS Issue: MARS-835
|
||||
-- Verification: Compare OU_CSDB source tables with ODS external tables
|
||||
-- =====================================================================================
|
||||
|
||||
SET SERVEROUTPUT ON SIZE UNLIMITED;
|
||||
SET FEEDBACK OFF;
|
||||
SET VERIFY OFF;
|
||||
SET LINESIZE 200;
|
||||
|
||||
PROMPT =====================================================================================
|
||||
PROMPT MARS-835 Record Count Verification
|
||||
PROMPT =====================================================================================
|
||||
PROMPT Comparing source table counts with exported external table counts
|
||||
PROMPT =====================================================================================
|
||||
|
||||
-- Compares COUNT(*) of each OU_CSDB source table against the external tables
-- that read the exported files back, and prints a PASS/MISMATCH/ERROR report.
-- Read-only: only dynamic SELECT COUNT(*) statements are executed.
DECLARE
    -- Per-table export configuration: source table, the external table(s)
    -- over the exported files, and flags describing which exports exist.
    TYPE t_table_info IS RECORD (
        source_schema       VARCHAR2(50),
        source_table        VARCHAR2(100),
        data_external_table VARCHAR2(100),   -- NULL when the table is HIST-only
        hist_external_table VARCHAR2(100),
        has_data_export     BOOLEAN,
        has_hist_export     BOOLEAN
    );
    TYPE t_table_list IS TABLE OF t_table_info;

    vTables           t_table_list;
    vSourceCount      NUMBER;
    vDataCount        NUMBER;                -- -1 = access error, NULL = no DATA export
    vHistCount        NUMBER;                -- -1 = access error
    vTotalSourceCount NUMBER := 0;
    vTotalDataCount   NUMBER := 0;
    vTotalHistCount   NUMBER := 0;
    vMismatchCount    NUMBER := 0;           -- counts MISMATCH and ERROR rows
    vSql              VARCHAR2(4000);
BEGIN
    DBMS_OUTPUT.PUT_LINE('VERIFICATION TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS'));
    DBMS_OUTPUT.PUT_LINE('');

    -- Initialize table list with export configuration
    vTables := t_table_list(
        t_table_info('OU_CSDB', 'LEGACY_DEBT', 'ODS.CSDB_DEBT_ODS', 'ODS.CSDB_DEBT_ARCHIVE', TRUE, TRUE),
        t_table_info('OU_CSDB', 'LEGACY_DEBT_DAILY', 'ODS.CSDB_DEBT_DAILY_ODS', 'ODS.CSDB_DEBT_DAILY_ARCHIVE', TRUE, TRUE),
        t_table_info('OU_CSDB', 'LEGACY_INSTR_RAT_FULL', NULL, 'ODS.CSDB_INSTR_RAT_FULL_ARCHIVE', FALSE, TRUE),
        t_table_info('OU_CSDB', 'LEGACY_INSTR_DESC_FULL', NULL, 'ODS.CSDB_INSTR_DESC_FULL_ARCHIVE', FALSE, TRUE),
        t_table_info('OU_CSDB', 'LEGACY_ISSUER_RAT_FULL', NULL, 'ODS.CSDB_ISSUER_RAT_FULL_ARCHIVE', FALSE, TRUE),
        t_table_info('OU_CSDB', 'LEGACY_ISSUER_DESC_FULL', NULL, 'ODS.CSDB_ISSUER_DESC_FULL_ARCHIVE', FALSE, TRUE)
    );

    DBMS_OUTPUT.PUT_LINE('-------------------------------------------------------------------------------------');
    -- Header aligned with the data rows below: RPAD(24) || LPAD(13) || LPAD(13) || LPAD(13) || ' ' || status.
    DBMS_OUTPUT.PUT_LINE('Table Name               Source Count   DATA Count   HIST Count Status');
    DBMS_OUTPUT.PUT_LINE('-------------------------------------------------------------------------------------');

    FOR i IN 1..vTables.COUNT LOOP
        -- Get source table count
        vSql := 'SELECT COUNT(*) FROM ' || vTables(i).source_schema || '.' || vTables(i).source_table;

        BEGIN
            EXECUTE IMMEDIATE vSql INTO vSourceCount;
            vTotalSourceCount := vTotalSourceCount + vSourceCount;
        EXCEPTION
            WHEN OTHERS THEN
                vSourceCount := -1;
                DBMS_OUTPUT.PUT_LINE(RPAD(vTables(i).source_table, 24) || 'ERROR: Cannot access source table');
                CONTINUE;  -- nothing to compare without a source count
        END;

        -- Get DATA external table count (if applicable)
        IF vTables(i).has_data_export THEN
            vSql := 'SELECT COUNT(*) FROM ' || vTables(i).data_external_table;
            BEGIN
                EXECUTE IMMEDIATE vSql INTO vDataCount;
                vTotalDataCount := vTotalDataCount + vDataCount;
            EXCEPTION
                WHEN OTHERS THEN
                    vDataCount := -1;  -- external table missing or unreadable
            END;
        ELSE
            vDataCount := NULL;        -- HIST-only table: no DATA export expected
        END IF;

        -- Get HIST external table count
        vSql := 'SELECT COUNT(*) FROM ' || vTables(i).hist_external_table;
        BEGIN
            EXECUTE IMMEDIATE vSql INTO vHistCount;
            vTotalHistCount := vTotalHistCount + vHistCount;
        EXCEPTION
            WHEN OTHERS THEN
                vHistCount := -1;      -- external table missing or unreadable
        END;

        -- Display results
        DECLARE
            vStatus      VARCHAR2(20);
            vDataDisplay VARCHAR2(15);
            vHistDisplay VARCHAR2(15);
        BEGIN
            -- Format DATA count display
            IF vDataCount IS NULL THEN
                vDataDisplay := 'N/A';
            ELSIF vDataCount = -1 THEN
                vDataDisplay := 'ERROR';
            ELSE
                vDataDisplay := TO_CHAR(vDataCount, '999,999,999');
            END IF;

            -- Format HIST count display
            IF vHistCount = -1 THEN
                vHistDisplay := 'ERROR';
            ELSE
                vHistDisplay := TO_CHAR(vHistCount, '999,999,999');
            END IF;

            -- Determine status.
            -- BUGFIX: test the -1 error sentinels BEFORE comparing counts.
            -- Previously the sum test ran first, so vDataCount = -1 combined
            -- with vHistCount = vSourceCount + 1 summed to vSourceCount and
            -- produced a false PASS despite an access error.
            IF vTables(i).has_data_export THEN
                -- Split export: DATA + HIST must equal SOURCE
                IF vDataCount = -1 OR vHistCount = -1 THEN
                    vStatus := 'ERROR';
                    vMismatchCount := vMismatchCount + 1;
                ELSIF (vDataCount + vHistCount) = vSourceCount THEN
                    vStatus := 'PASS';
                ELSE
                    vStatus := 'MISMATCH';
                    vMismatchCount := vMismatchCount + 1;
                END IF;
            ELSE
                -- HIST only: HIST must equal SOURCE
                IF vHistCount = -1 THEN
                    vStatus := 'ERROR';
                    vMismatchCount := vMismatchCount + 1;
                ELSIF vHistCount = vSourceCount THEN
                    vStatus := 'PASS';
                ELSE
                    vStatus := 'MISMATCH';
                    vMismatchCount := vMismatchCount + 1;
                END IF;
            END IF;

            DBMS_OUTPUT.PUT_LINE(
                RPAD(vTables(i).source_table, 24) ||
                LPAD(TO_CHAR(vSourceCount, '999,999,999'), 13) ||
                LPAD(vDataDisplay, 13) ||
                LPAD(vHistDisplay, 13) || ' ' ||
                vStatus
            );
        END;
    END LOOP;

    DBMS_OUTPUT.PUT_LINE('-------------------------------------------------------------------------------------');
    DBMS_OUTPUT.PUT_LINE('TOTALS:' || LPAD(TO_CHAR(vTotalSourceCount, '999,999,999'), 17) ||
        LPAD(TO_CHAR(vTotalDataCount, '999,999,999'), 13) ||
        LPAD(TO_CHAR(vTotalHistCount, '999,999,999'), 13));
    DBMS_OUTPUT.PUT_LINE('-------------------------------------------------------------------------------------');
    DBMS_OUTPUT.PUT_LINE('');

    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    DBMS_OUTPUT.PUT_LINE('Record Count Verification Summary');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    DBMS_OUTPUT.PUT_LINE('Total source records: ' || TO_CHAR(vTotalSourceCount, '999,999,999'));
    DBMS_OUTPUT.PUT_LINE('Total DATA records: ' || TO_CHAR(vTotalDataCount, '999,999,999') || ' (last 6 months)');
    DBMS_OUTPUT.PUT_LINE('Total HIST records: ' || TO_CHAR(vTotalHistCount, '999,999,999') || ' (historical + full exports)');
    DBMS_OUTPUT.PUT_LINE('');

    IF vMismatchCount = 0 THEN
        DBMS_OUTPUT.PUT_LINE('[PASS] VERIFICATION PASSED');
        DBMS_OUTPUT.PUT_LINE(' All record counts match between source and exported data');
        DBMS_OUTPUT.PUT_LINE(' Export completed successfully');
    ELSE
        DBMS_OUTPUT.PUT_LINE('[INFO] VERIFICATION COMPLETED WITH MISMATCHES');
        DBMS_OUTPUT.PUT_LINE(' Found ' || vMismatchCount || ' table(s) with count mismatches');
        DBMS_OUTPUT.PUT_LINE(' NOTE: Mismatches may be caused by pre-existing files in buckets (see pre-check)');
        DBMS_OUTPUT.PUT_LINE(' Review export logs and pre-check results before re-running exports');
    END IF;

    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    DBMS_OUTPUT.PUT_LINE('');
    DBMS_OUTPUT.PUT_LINE('Legend:');
    DBMS_OUTPUT.PUT_LINE(' PASS - Record counts match (export successful)');
    DBMS_OUTPUT.PUT_LINE(' MISMATCH - Record counts differ (may be pre-existing files or export issue)');
    DBMS_OUTPUT.PUT_LINE(' Check pre-check results to identify pre-existing files');
    DBMS_OUTPUT.PUT_LINE(' ERROR - Cannot access table (may not exist yet)');
    DBMS_OUTPUT.PUT_LINE(' N/A - Not applicable (table not exported to DATA)');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');

EXCEPTION
    WHEN OTHERS THEN
        -- Unexpected failure outside the per-table handlers: report and
        -- re-raise so the calling script sees a non-zero outcome.
        DBMS_OUTPUT.PUT_LINE('ERROR during record count verification: ' || SQLERRM);
        RAISE;
END;
/
|
||||
|
||||
SET FEEDBACK ON;
|
||||
|
||||
PROMPT
|
||||
PROMPT Record count verification completed
|
||||
PROMPT
|
||||
@@ -0,0 +1,181 @@
|
||||
--=============================================================================================================================
|
||||
-- MARS-835 ROLLBACK: Delete Group 1 Exported Files (DEBT, DEBT_DAILY)
|
||||
--=============================================================================================================================
|
||||
-- Purpose: Delete exported CSV and Parquet files from DATA and HIST buckets
|
||||
-- WARNING: This will permanently delete exported data files!
|
||||
-- Author: Grzegorz Michalski
|
||||
-- Date: 2025-12-17
|
||||
-- Related: MARS-835 - CSDB Data Export Rollback
|
||||
--=============================================================================================================================
|
||||
|
||||
SET SERVEROUTPUT ON SIZE UNLIMITED
|
||||
|
||||
PROMPT ========================================================================
|
||||
PROMPT ROLLBACK: Deleting DEBT exported files
|
||||
PROMPT ========================================================================
|
||||
PROMPT WARNING: This will delete files from:
|
||||
PROMPT - DATA bucket: mrds_data_dev/ODS/CSDB/CSDB_DEBT/
|
||||
PROMPT - HIST bucket: mrds_hist_dev/ARCHIVE/CSDB/CSDB_DEBT/
|
||||
PROMPT ========================================================================
|
||||
|
||||
-- Rollback for the DEBT exports: deletes the exported CSV files from the DATA
-- bucket and the exported Parquet files from the HIST bucket. Only objects
-- matching the exporter's naming pattern are removed; anything else in the
-- folders is left untouched. DESTRUCTIVE and not reversible.
DECLARE
    vDataBucketUri  VARCHAR2(500);
    vHistBucketUri  VARCHAR2(500);
    vCredentialName VARCHAR2(100);
BEGIN
    -- Get bucket URIs and credential
    vDataBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('DATA');
    vHistBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
    vCredentialName := CT_MRDS.ENV_MANAGER.gvCredentialName;

    DBMS_OUTPUT.PUT_LINE('Deleting DEBT files from DATA bucket...');

    -- Delete CSV files from DATA bucket (only files matching export pattern)
    -- Pattern matches: LEGACY_DEBT_YYYYMM.csv OR LEGACY_DEBT_YYYYMM_1_20260122T...Z.csv (Oracle timestamp)
    FOR rec IN (
        SELECT object_name
        FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
            credential_name => vCredentialName,
            location_uri => vDataBucketUri || 'ODS/CSDB/CSDB_DEBT/'
        ))
        -- LIKE filters narrow the listing cheaply; the anchored regexp is the
        -- authoritative safety check against deleting unrelated objects.
        WHERE object_name LIKE 'LEGACY_DEBT_%'
        AND object_name LIKE '%.csv'
        AND REGEXP_LIKE(object_name, '^LEGACY_DEBT_[0-9]{6}(_[0-9]+_[0-9]{8}T[0-9]{6,}Z)?\.csv$') -- YYYYMM or YYYYMM_1_timestamp
    ) LOOP
        BEGIN
            DBMS_CLOUD.DELETE_OBJECT(
                credential_name => vCredentialName,
                object_uri => vDataBucketUri || 'ODS/CSDB/CSDB_DEBT/' || rec.object_name
            );
            DBMS_OUTPUT.PUT_LINE(' Deleted: ' || rec.object_name);
        EXCEPTION
            WHEN OTHERS THEN
                -- ORA-20404: object no longer exists (raced/already deleted) -- skip it.
                IF SQLCODE = -20404 THEN
                    DBMS_OUTPUT.PUT_LINE(' Skipped (not found): ' || rec.object_name);
                ELSE
                    RAISE;  -- any other failure aborts the rollback
                END IF;
        END;
    END LOOP;

    DBMS_OUTPUT.PUT_LINE('Deleting DEBT files from HIST bucket...');

    -- Delete Parquet files from HIST bucket (only files matching export pattern)
    -- Pattern matches: YYYYMM.parquet OR YYYYMM_1_20260122T...Z.parquet (Oracle timestamp)
    FOR rec IN (
        SELECT object_name
        FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
            credential_name => vCredentialName,
            location_uri => vHistBucketUri || 'ARCHIVE/CSDB/CSDB_DEBT/'
        ))
        WHERE object_name LIKE '%PARTITION_YEAR=%' -- Hive-style partitioning folders
        AND object_name LIKE '%.parquet'
        AND REGEXP_LIKE(object_name, '[0-9]{6}(_[0-9]+_[0-9]{8}T[0-9]{6,}Z)?\.parquet$') -- YYYYMM or YYYYMM_1_timestamp
    ) LOOP
        BEGIN
            DBMS_CLOUD.DELETE_OBJECT(
                credential_name => vCredentialName,
                object_uri => vHistBucketUri || 'ARCHIVE/CSDB/CSDB_DEBT/' || rec.object_name
            );
            DBMS_OUTPUT.PUT_LINE(' Deleted: ' || rec.object_name);
        EXCEPTION
            WHEN OTHERS THEN
                IF SQLCODE = -20404 THEN
                    DBMS_OUTPUT.PUT_LINE(' Skipped (not found): ' || rec.object_name);
                ELSE
                    RAISE;
                END IF;
        END;
    END LOOP;

    DBMS_OUTPUT.PUT_LINE('SUCCESS: DEBT files deleted');
END;
/
|
||||
|
||||
PROMPT ========================================================================
|
||||
PROMPT ROLLBACK: Deleting DEBT_DAILY exported files
|
||||
PROMPT ========================================================================
|
||||
PROMPT WARNING: This will delete files from:
|
||||
PROMPT - DATA bucket: mrds_data_dev/ODS/CSDB/CSDB_DEBT_DAILY/
|
||||
PROMPT - HIST bucket: mrds_hist_dev/ARCHIVE/CSDB/CSDB_DEBT_DAILY/
|
||||
PROMPT ========================================================================
|
||||
|
||||
-- Rollback for the DEBT_DAILY exports: deletes the exported CSV files from the
-- DATA bucket and the exported Parquet files from the HIST bucket. Only objects
-- matching the exporter's naming pattern are removed. DESTRUCTIVE and not
-- reversible.
DECLARE
    vDataBucketUri  VARCHAR2(500);
    vHistBucketUri  VARCHAR2(500);
    vCredentialName VARCHAR2(100);
BEGIN
    -- Get bucket URIs and credential
    vDataBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('DATA');
    vHistBucketUri := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
    vCredentialName := CT_MRDS.ENV_MANAGER.gvCredentialName;

    DBMS_OUTPUT.PUT_LINE('Deleting DEBT_DAILY files from DATA bucket...');

    -- Delete CSV files from DATA bucket (only files matching export pattern)
    -- Pattern matches: LEGACY_DEBT_DAILY_YYYYMM.csv OR LEGACY_DEBT_DAILY_YYYYMM_1_timestamp.csv
    FOR rec IN (
        SELECT object_name
        FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
            credential_name => vCredentialName,
            location_uri => vDataBucketUri || 'ODS/CSDB/CSDB_DEBT_DAILY/'
        ))
        -- LIKE filters narrow the listing cheaply; the anchored regexp is the
        -- authoritative safety check against deleting unrelated objects.
        WHERE object_name LIKE 'LEGACY_DEBT_DAILY_%'
        AND object_name LIKE '%.csv'
        AND REGEXP_LIKE(object_name, '^LEGACY_DEBT_DAILY_[0-9]{6}(_[0-9]+_[0-9]{8}T[0-9]{6,}Z)?\.csv$') -- YYYYMM or YYYYMM_1_timestamp
    ) LOOP
        BEGIN
            DBMS_CLOUD.DELETE_OBJECT(
                credential_name => vCredentialName,
                object_uri => vDataBucketUri || 'ODS/CSDB/CSDB_DEBT_DAILY/' || rec.object_name
            );
            DBMS_OUTPUT.PUT_LINE(' Deleted: ' || rec.object_name);
        EXCEPTION
            WHEN OTHERS THEN
                -- ORA-20404: object no longer exists (raced/already deleted) -- skip it.
                IF SQLCODE = -20404 THEN
                    DBMS_OUTPUT.PUT_LINE(' Skipped (not found): ' || rec.object_name);
                ELSE
                    RAISE;  -- any other failure aborts the rollback
                END IF;
        END;
    END LOOP;

    DBMS_OUTPUT.PUT_LINE('Deleting DEBT_DAILY files from HIST bucket...');

    -- Delete Parquet files from HIST bucket (only files matching export pattern)
    -- Pattern matches: YYYYMM.parquet OR YYYYMM_1_timestamp.parquet
    FOR rec IN (
        SELECT object_name
        FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
            credential_name => vCredentialName,
            location_uri => vHistBucketUri || 'ARCHIVE/CSDB/CSDB_DEBT_DAILY/'
        ))
        WHERE object_name LIKE '%PARTITION_YEAR=%' -- Hive-style partitioning folders
        AND object_name LIKE '%.parquet'
        AND REGEXP_LIKE(object_name, '[0-9]{6}(_[0-9]+_[0-9]{8}T[0-9]{6,}Z)?\.parquet$') -- YYYYMM or YYYYMM_1_timestamp
    ) LOOP
        BEGIN
            DBMS_CLOUD.DELETE_OBJECT(
                credential_name => vCredentialName,
                object_uri => vHistBucketUri || 'ARCHIVE/CSDB/CSDB_DEBT_DAILY/' || rec.object_name
            );
            DBMS_OUTPUT.PUT_LINE(' Deleted: ' || rec.object_name);
        EXCEPTION
            WHEN OTHERS THEN
                IF SQLCODE = -20404 THEN
                    DBMS_OUTPUT.PUT_LINE(' Skipped (not found): ' || rec.object_name);
                ELSE
                    RAISE;
                END IF;
        END;
    END LOOP;

    DBMS_OUTPUT.PUT_LINE('SUCCESS: DEBT_DAILY files deleted');
END;
/
|
||||
|
||||
PROMPT SUCCESS: Group 1 rollback completed
|
||||
|
||||
--=============================================================================================================================
|
||||
-- End of Script
|
||||
--=============================================================================================================================
|
||||
@@ -0,0 +1,193 @@
|
||||
--=============================================================================================================================
|
||||
-- MARS-835 ROLLBACK: Delete Group 2 Exported Files (4 HIST-only tables)
|
||||
--=============================================================================================================================
|
||||
-- Purpose: Delete exported Parquet files from HIST bucket
|
||||
-- WARNING: This will permanently delete exported data files!
|
||||
-- Author: Grzegorz Michalski
|
||||
-- Date: 2025-12-17
|
||||
-- Related: MARS-835 - CSDB Data Export Rollback
|
||||
--=============================================================================================================================
|
||||
|
||||
SET SERVEROUTPUT ON SIZE UNLIMITED
|
||||
|
||||
PROMPT ========================================================================
|
||||
PROMPT ROLLBACK: Deleting INSTR_RAT_FULL exported files
|
||||
PROMPT ========================================================================
|
||||
|
||||
DECLARE
    -- MARS-835 rollback: delete the Parquet files exported for
    -- CSDB_INSTR_RAT_FULL from the HIST (ARCHIVE) bucket. Only files
    -- matching the export naming pattern are removed, so any
    -- pre-existing objects in the folder are left untouched.
    vBucketUri       VARCHAR2(500);
    vCredentialName  VARCHAR2(100);

    -- DBMS_CLOUD.DELETE_OBJECT raises ORA-20404 when the object is
    -- already gone; handle it by name rather than probing SQLCODE
    -- inside a WHEN OTHERS handler (which would mask unrelated errors).
    eObjectNotFound  EXCEPTION;
    PRAGMA EXCEPTION_INIT(eObjectNotFound, -20404);
BEGIN
    -- Resolve bucket URI and credential from the environment packages
    vBucketUri      := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
    vCredentialName := CT_MRDS.ENV_MANAGER.gvCredentialName;

    DBMS_OUTPUT.PUT_LINE('Deleting INSTR_RAT_FULL files from HIST bucket...');

    -- List only export-generated Parquet files under the Hive-partitioned
    -- folder. Pattern matches: YYYYMM.parquet OR YYYYMM_<n>_<timestamp>.parquet
    FOR rec IN (
        SELECT object_name
          FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
                   credential_name => vCredentialName,
                   location_uri    => vBucketUri || 'ARCHIVE/CSDB/CSDB_INSTR_RAT_FULL/'
               ))
         WHERE object_name LIKE '%PARTITION_YEAR=%'   -- Hive-style partitioning folders
           AND object_name LIKE '%.parquet'
           AND REGEXP_LIKE(object_name, '[0-9]{6}(_[0-9]+_[0-9]{8}T[0-9]{6,}Z)?\.parquet$')
    ) LOOP
        BEGIN
            DBMS_CLOUD.DELETE_OBJECT(
                credential_name => vCredentialName,
                object_uri      => vBucketUri || 'ARCHIVE/CSDB/CSDB_INSTR_RAT_FULL/' || rec.object_name
            );
            DBMS_OUTPUT.PUT_LINE('  Deleted: ' || rec.object_name);
        EXCEPTION
            WHEN eObjectNotFound THEN
                -- Already deleted (e.g. partial earlier rollback) - not an error
                DBMS_OUTPUT.PUT_LINE('  Skipped (not found): ' || rec.object_name);
            -- Any other error propagates and aborts the rollback
        END;
    END LOOP;

    DBMS_OUTPUT.PUT_LINE('SUCCESS: INSTR_RAT_FULL files deleted');
END;
/
|
||||
|
||||
PROMPT ========================================================================
|
||||
PROMPT ROLLBACK: Deleting INSTR_DESC_FULL exported files
|
||||
PROMPT ========================================================================
|
||||
|
||||
DECLARE
    -- MARS-835 rollback: delete the Parquet files exported for
    -- CSDB_INSTR_DESC_FULL from the HIST (ARCHIVE) bucket. Only files
    -- matching the export naming pattern are removed, so any
    -- pre-existing objects in the folder are left untouched.
    vBucketUri       VARCHAR2(500);
    vCredentialName  VARCHAR2(100);

    -- DBMS_CLOUD.DELETE_OBJECT raises ORA-20404 when the object is
    -- already gone; handle it by name rather than probing SQLCODE
    -- inside a WHEN OTHERS handler (which would mask unrelated errors).
    eObjectNotFound  EXCEPTION;
    PRAGMA EXCEPTION_INIT(eObjectNotFound, -20404);
BEGIN
    -- Resolve bucket URI and credential from the environment packages
    vBucketUri      := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
    vCredentialName := CT_MRDS.ENV_MANAGER.gvCredentialName;

    DBMS_OUTPUT.PUT_LINE('Deleting INSTR_DESC_FULL files from HIST bucket...');

    -- List only export-generated Parquet files under the Hive-partitioned
    -- folder. Pattern matches: YYYYMM.parquet OR YYYYMM_<n>_<timestamp>.parquet
    FOR rec IN (
        SELECT object_name
          FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
                   credential_name => vCredentialName,
                   location_uri    => vBucketUri || 'ARCHIVE/CSDB/CSDB_INSTR_DESC_FULL/'
               ))
         WHERE object_name LIKE '%PARTITION_YEAR=%'   -- Hive-style partitioning folders
           AND object_name LIKE '%.parquet'
           AND REGEXP_LIKE(object_name, '[0-9]{6}(_[0-9]+_[0-9]{8}T[0-9]{6,}Z)?\.parquet$')
    ) LOOP
        BEGIN
            DBMS_CLOUD.DELETE_OBJECT(
                credential_name => vCredentialName,
                object_uri      => vBucketUri || 'ARCHIVE/CSDB/CSDB_INSTR_DESC_FULL/' || rec.object_name
            );
            DBMS_OUTPUT.PUT_LINE('  Deleted: ' || rec.object_name);
        EXCEPTION
            WHEN eObjectNotFound THEN
                -- Already deleted (e.g. partial earlier rollback) - not an error
                DBMS_OUTPUT.PUT_LINE('  Skipped (not found): ' || rec.object_name);
            -- Any other error propagates and aborts the rollback
        END;
    END LOOP;

    DBMS_OUTPUT.PUT_LINE('SUCCESS: INSTR_DESC_FULL files deleted');
END;
/
|
||||
|
||||
PROMPT ========================================================================
|
||||
PROMPT ROLLBACK: Deleting ISSUER_RAT_FULL exported files
|
||||
PROMPT ========================================================================
|
||||
|
||||
DECLARE
    -- MARS-835 rollback: delete the Parquet files exported for
    -- CSDB_ISSUER_RAT_FULL from the HIST (ARCHIVE) bucket. Only files
    -- matching the export naming pattern are removed, so any
    -- pre-existing objects in the folder are left untouched.
    vBucketUri       VARCHAR2(500);
    vCredentialName  VARCHAR2(100);

    -- DBMS_CLOUD.DELETE_OBJECT raises ORA-20404 when the object is
    -- already gone; handle it by name rather than probing SQLCODE
    -- inside a WHEN OTHERS handler (which would mask unrelated errors).
    eObjectNotFound  EXCEPTION;
    PRAGMA EXCEPTION_INIT(eObjectNotFound, -20404);
BEGIN
    -- Resolve bucket URI and credential from the environment packages
    vBucketUri      := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
    vCredentialName := CT_MRDS.ENV_MANAGER.gvCredentialName;

    DBMS_OUTPUT.PUT_LINE('Deleting ISSUER_RAT_FULL files from HIST bucket...');

    -- List only export-generated Parquet files under the Hive-partitioned
    -- folder. Pattern matches: YYYYMM.parquet OR YYYYMM_<n>_<timestamp>.parquet
    FOR rec IN (
        SELECT object_name
          FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
                   credential_name => vCredentialName,
                   location_uri    => vBucketUri || 'ARCHIVE/CSDB/CSDB_ISSUER_RAT_FULL/'
               ))
         WHERE object_name LIKE '%PARTITION_YEAR=%'   -- Hive-style partitioning folders
           AND object_name LIKE '%.parquet'
           AND REGEXP_LIKE(object_name, '[0-9]{6}(_[0-9]+_[0-9]{8}T[0-9]{6,}Z)?\.parquet$')
    ) LOOP
        BEGIN
            DBMS_CLOUD.DELETE_OBJECT(
                credential_name => vCredentialName,
                object_uri      => vBucketUri || 'ARCHIVE/CSDB/CSDB_ISSUER_RAT_FULL/' || rec.object_name
            );
            DBMS_OUTPUT.PUT_LINE('  Deleted: ' || rec.object_name);
        EXCEPTION
            WHEN eObjectNotFound THEN
                -- Already deleted (e.g. partial earlier rollback) - not an error
                DBMS_OUTPUT.PUT_LINE('  Skipped (not found): ' || rec.object_name);
            -- Any other error propagates and aborts the rollback
        END;
    END LOOP;

    DBMS_OUTPUT.PUT_LINE('SUCCESS: ISSUER_RAT_FULL files deleted');
END;
/
|
||||
|
||||
PROMPT ========================================================================
|
||||
PROMPT ROLLBACK: Deleting ISSUER_DESC_FULL exported files
|
||||
PROMPT ========================================================================
|
||||
|
||||
DECLARE
    -- MARS-835 rollback: delete the Parquet files exported for
    -- CSDB_ISSUER_DESC_FULL from the HIST (ARCHIVE) bucket. Only files
    -- matching the export naming pattern are removed, so any
    -- pre-existing objects in the folder are left untouched.
    vBucketUri       VARCHAR2(500);
    vCredentialName  VARCHAR2(100);

    -- DBMS_CLOUD.DELETE_OBJECT raises ORA-20404 when the object is
    -- already gone; handle it by name rather than probing SQLCODE
    -- inside a WHEN OTHERS handler (which would mask unrelated errors).
    eObjectNotFound  EXCEPTION;
    PRAGMA EXCEPTION_INIT(eObjectNotFound, -20404);
BEGIN
    -- Resolve bucket URI and credential from the environment packages
    vBucketUri      := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
    vCredentialName := CT_MRDS.ENV_MANAGER.gvCredentialName;

    DBMS_OUTPUT.PUT_LINE('Deleting ISSUER_DESC_FULL files from HIST bucket...');

    -- List only export-generated Parquet files under the Hive-partitioned
    -- folder. Pattern matches: YYYYMM.parquet OR YYYYMM_<n>_<timestamp>.parquet
    FOR rec IN (
        SELECT object_name
          FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
                   credential_name => vCredentialName,
                   location_uri    => vBucketUri || 'ARCHIVE/CSDB/CSDB_ISSUER_DESC_FULL/'
               ))
         WHERE object_name LIKE '%PARTITION_YEAR=%'   -- Hive-style partitioning folders
           AND object_name LIKE '%.parquet'
           AND REGEXP_LIKE(object_name, '[0-9]{6}(_[0-9]+_[0-9]{8}T[0-9]{6,}Z)?\.parquet$')
    ) LOOP
        BEGIN
            DBMS_CLOUD.DELETE_OBJECT(
                credential_name => vCredentialName,
                object_uri      => vBucketUri || 'ARCHIVE/CSDB/CSDB_ISSUER_DESC_FULL/' || rec.object_name
            );
            DBMS_OUTPUT.PUT_LINE('  Deleted: ' || rec.object_name);
        EXCEPTION
            WHEN eObjectNotFound THEN
                -- Already deleted (e.g. partial earlier rollback) - not an error
                DBMS_OUTPUT.PUT_LINE('  Skipped (not found): ' || rec.object_name);
            -- Any other error propagates and aborts the rollback
        END;
    END LOOP;

    DBMS_OUTPUT.PUT_LINE('SUCCESS: ISSUER_DESC_FULL files deleted');
END;
/
|
||||
|
||||
PROMPT SUCCESS: Group 2 rollback completed
|
||||
|
||||
--=============================================================================================================================
|
||||
-- End of Script
|
||||
--=============================================================================================================================
|
||||
@@ -0,0 +1,176 @@
|
||||
-- =====================================================================================
|
||||
-- Script: 99_MARS_835_verify_rollback.sql
|
||||
-- Purpose: Verify all exported files have been deleted from DATA and HIST buckets
|
||||
-- Author: Grzegorz Michalski
|
||||
-- Created: 2025-12-17
|
||||
-- MARS Issue: MARS-835
|
||||
-- Verification: Confirm complete rollback (no CSDB files remaining)
|
||||
-- =====================================================================================
|
||||
|
||||
SET SERVEROUTPUT ON SIZE UNLIMITED;
|
||||
SET FEEDBACK ON;
|
||||
SET VERIFY OFF;
|
||||
SET LINESIZE 200;
|
||||
|
||||
PROMPT =====================================================================================
|
||||
PROMPT MARS-835 Rollback Verification
|
||||
PROMPT =====================================================================================
|
||||
PROMPT Checking that all CSDB export files have been deleted
|
||||
PROMPT =====================================================================================
|
||||
|
||||
DECLARE
    -- MARS-835 rollback verification: scan the DATA (CSV) and HIST
    -- (Parquet) buckets for any remaining CSDB export files and report
    -- whether the rollback left the buckets clean.
    --
    -- NOTE(review): files found may be pre-existing objects from before
    -- installation; the rollback only deletes files it created.
    vDataBucketUri   VARCHAR2(500);
    vHistBucketUri   VARCHAR2(500);
    vCredentialName  VARCHAR2(100);
    vDataFileCount   NUMBER := 0;   -- CSV files remaining in DATA bucket
    vHistFileCount   NUMBER := 0;   -- Parquet files remaining in HIST bucket (capped per folder)
    vHistTruncated   BOOLEAN := FALSE;  -- TRUE if any HIST listing hit its cap
    vTotalFiles      NUMBER := 0;

    TYPE t_folder_list IS TABLE OF VARCHAR2(200);
    vDataFolders t_folder_list;
    vHistFolders t_folder_list;

    -- DBMS_CLOUD raises ORA-20404 for a missing folder/object
    eObjectNotFound EXCEPTION;
    PRAGMA EXCEPTION_INIT(eObjectNotFound, -20404);

    -------------------------------------------------------------------------
    -- Scan one bucket folder for files ending in pExtension and print the
    -- findings.
    --   pMaxRows      : cap on rows listed (NULL = unlimited)
    --   pShowRows     : cap on file names printed
    --   pRunningTotal : incremented by the number of files found
    --   pTruncated    : set TRUE when the listing hit pMaxRows
    -------------------------------------------------------------------------
    PROCEDURE check_folder(
        pBucketUri    IN VARCHAR2,
        pFolder       IN VARCHAR2,
        pExtension    IN VARCHAR2,
        pMaxRows      IN NUMBER,
        pShowRows     IN NUMBER,
        pRunningTotal IN OUT NUMBER,
        pTruncated    IN OUT BOOLEAN
    ) IS
        vCount NUMBER := 0;
    BEGIN
        DBMS_OUTPUT.PUT_LINE('');
        DBMS_OUTPUT.PUT_LINE('Folder: ' || pFolder);

        FOR rec IN (
            SELECT object_name
              FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
                       credential_name => vCredentialName,
                       location_uri    => pBucketUri || pFolder
                   ))
             WHERE object_name LIKE '%' || pExtension
               AND (pMaxRows IS NULL OR ROWNUM <= pMaxRows)  -- ROWNUM assigned after LIKE filter
        ) LOOP
            vCount        := vCount + 1;
            pRunningTotal := pRunningTotal + 1;
            IF vCount <= pShowRows THEN
                DBMS_OUTPUT.PUT_LINE('  [FOUND] ' || rec.object_name);
            END IF;
        END LOOP;

        IF vCount = 0 THEN
            DBMS_OUTPUT.PUT_LINE('  [OK] No ' || pExtension || ' files found');
        ELSE
            -- Only claim "N+" when the listing was actually truncated at the
            -- cap; an exact count below the cap is reported as-is.
            IF pMaxRows IS NOT NULL AND vCount >= pMaxRows THEN
                pTruncated := TRUE;
                DBMS_OUTPUT.PUT_LINE('  [INFO] Found ' || vCount
                    || '+ file(s) (showing first ' || pShowRows
                    || ') - may be pre-existing files from before installation');
            ELSE
                DBMS_OUTPUT.PUT_LINE('  [INFO] Found ' || vCount
                    || ' file(s) - may be pre-existing files from before installation');
            END IF;
        END IF;
    EXCEPTION
        WHEN eObjectNotFound THEN
            DBMS_OUTPUT.PUT_LINE('  [OK] Folder does not exist or is empty');
        WHEN OTHERS THEN
            -- Report but keep scanning the remaining folders
            DBMS_OUTPUT.PUT_LINE('  [ERROR] ' || SQLERRM);
    END check_folder;
BEGIN
    -- Resolve bucket URIs and credential
    vDataBucketUri  := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('DATA');
    vHistBucketUri  := CT_MRDS.FILE_MANAGER.GET_BUCKET_URI('ARCHIVE');
    vCredentialName := CT_MRDS.ENV_MANAGER.gvCredentialName;

    DBMS_OUTPUT.PUT_LINE('ROLLBACK VERIFICATION TIME: ' || TO_CHAR(SYSTIMESTAMP, 'YYYY-MM-DD HH24:MI:SS.FF3'));
    DBMS_OUTPUT.PUT_LINE('DATA Bucket URI: ' || vDataBucketUri);
    DBMS_OUTPUT.PUT_LINE('HIST Bucket URI: ' || vHistBucketUri);
    DBMS_OUTPUT.PUT_LINE('');

    -- Folders written by the MARS-835 export
    vDataFolders := t_folder_list(
        'ODS/CSDB/CSDB_DEBT/',
        'ODS/CSDB/CSDB_DEBT_DAILY/'
    );

    vHistFolders := t_folder_list(
        'ARCHIVE/CSDB/CSDB_DEBT/',
        'ARCHIVE/CSDB/CSDB_DEBT_DAILY/',
        'ARCHIVE/CSDB/CSDB_INSTR_RAT_FULL/',
        'ARCHIVE/CSDB/CSDB_INSTR_DESC_FULL/',
        'ARCHIVE/CSDB/CSDB_ISSUER_RAT_FULL/',
        'ARCHIVE/CSDB/CSDB_ISSUER_DESC_FULL/'
    );

    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    DBMS_OUTPUT.PUT_LINE('Checking DATA Bucket (should be empty)');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');

    -- DATA bucket: CSV files, no listing cap, print every name
    FOR i IN 1..vDataFolders.COUNT LOOP
        check_folder(vDataBucketUri, vDataFolders(i), '.csv',
                     pMaxRows => NULL, pShowRows => 999999,
                     pRunningTotal => vDataFileCount, pTruncated => vHistTruncated);
    END LOOP;

    DBMS_OUTPUT.PUT_LINE('');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    DBMS_OUTPUT.PUT_LINE('Checking HIST Bucket (should be empty)');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');

    -- HIST bucket: Parquet files, cap listing at 10 per folder (folders can
    -- hold thousands of partition files), print at most 5 names
    FOR i IN 1..vHistFolders.COUNT LOOP
        check_folder(vHistBucketUri, vHistFolders(i), '.parquet',
                     pMaxRows => 10, pShowRows => 5,
                     pRunningTotal => vHistFileCount, pTruncated => vHistTruncated);
    END LOOP;

    vTotalFiles := vDataFileCount + vHistFileCount;

    DBMS_OUTPUT.PUT_LINE('');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    DBMS_OUTPUT.PUT_LINE('Rollback Verification Summary');
    DBMS_OUTPUT.PUT_LINE('=====================================================================================');
    DBMS_OUTPUT.PUT_LINE('DATA bucket files remaining: ' || vDataFileCount);
    DBMS_OUTPUT.PUT_LINE('HIST bucket files remaining: ' || vHistFileCount
        || CASE WHEN vHistTruncated THEN '+' END);
    DBMS_OUTPUT.PUT_LINE('Total files found: ' || vTotalFiles
        || CASE WHEN vHistTruncated THEN '+' END);
    DBMS_OUTPUT.PUT_LINE('');

    IF vTotalFiles = 0 THEN
        DBMS_OUTPUT.PUT_LINE('[PASSED] ROLLBACK VERIFICATION PASSED');
        DBMS_OUTPUT.PUT_LINE('   All CSDB export files have been deleted or were not created');
        DBMS_OUTPUT.PUT_LINE('   Buckets are clean and ready for re-export if needed');
    ELSE
        DBMS_OUTPUT.PUT_LINE('[INFO] ROLLBACK VERIFICATION COMPLETED');
        DBMS_OUTPUT.PUT_LINE('   Found ' || vTotalFiles
            || CASE WHEN vHistTruncated THEN '+' END || ' file(s) remaining in buckets');
        DBMS_OUTPUT.PUT_LINE('   NOTE: These may be pre-existing files from before installation.');
        DBMS_OUTPUT.PUT_LINE('   Rollback only deletes files created during this export operation.');
        DBMS_OUTPUT.PUT_LINE('   If needed, manually verify and clean up remaining files.');
    END IF;

    DBMS_OUTPUT.PUT_LINE('=====================================================================================');

EXCEPTION
    WHEN OTHERS THEN
        -- Unexpected failure outside the per-folder handlers: report and
        -- re-raise so the calling script sees the failure.
        DBMS_OUTPUT.PUT_LINE('ERROR during rollback verification: ' || SQLERRM);
        RAISE;
END;
/
|
||||
|
||||
PROMPT
|
||||
PROMPT Rollback verification completed
|
||||
PROMPT
|
||||
165
MARS_Packages/REL01_POST_DEACTIVATION/MARS-835/README.md
Normal file
165
MARS_Packages/REL01_POST_DEACTIVATION/MARS-835/README.md
Normal file
@@ -0,0 +1,165 @@
|
||||
# MARS-835: One-Time CSDB Data Export from Operational Database to External Tables
|
||||
|
||||
## Overview
|
||||
This package performs a one-time bulk export of CSDB data from operational database tables (OU_CSDB schema) to new external tables in OCI buckets. The export uses DATA_EXPORTER v2.4.0 with per-column date format handling to move historical data to either DATA bucket (CSV format) or HIST bucket (Parquet format with Hive-style partitioning).
|
||||
|
||||
**Migration Strategy:**
|
||||
- **Split Export (2 tables)**: DEBT, DEBT_DAILY - Last 6 months → DATA (CSV), Older data → HIST (Parquet)
|
||||
- **HIST Only (4 tables)**: INSTR_RAT_FULL, INSTR_DESC_FULL, ISSUER_RAT_FULL, ISSUER_DESC_FULL - All data → HIST (Parquet)
|
||||
|
||||
**Key Transformations:**
|
||||
- Column rename: `A_ETL_LOAD_SET_FK` → `A_WORKFLOW_HISTORY_KEY` (all tables)
|
||||
- Column removal: DEBT (2 columns), DEBT_DAILY (6 columns) not required in new structure
|
||||
|
||||
## Contents
|
||||
- `install_mars835.sql` - Master installation script with SPOOL logging
|
||||
- `rollback_mars835.sql` - Master rollback script
|
||||
- `01_MARS_835_*.sql` - Individual installation scripts
|
||||
- `91_MARS_835_*.sql` - Individual rollback scripts
|
||||
- `track_package_versions.sql` - Package version tracking
|
||||
- `verify_packages_version.sql` - Package verification
|
||||
|
||||
## Prerequisites
|
||||
- Oracle Database 23ai
|
||||
- ADMIN user access (required for all MARS installations)
|
||||
- ENV_MANAGER v3.1.0+
|
||||
- Required schema privileges
|
||||
|
||||
## Installation
|
||||
|
||||
### Option 1: Master Script (Recommended)
|
||||
```powershell
|
||||
# IMPORTANT: Execute as ADMIN user for proper privilege management
|
||||
Get-Content "MARS_Packages/REL01_POST_DEACTIVATION/MARS-835/install_mars835.sql" | sql "ADMIN/<password>@<tns_alias>"
|
||||
|
||||
# Log file created: log/INSTALL_MARS_835_<PDB>_<timestamp>.log
|
||||
```
|
||||
|
||||
### Option 2: Individual Scripts
|
||||
```powershell
|
||||
# IMPORTANT: Execute as ADMIN user
|
||||
Get-Content "01_MARS_835_*.sql" | sql "ADMIN/Cloudpass#34@ggmichalski_high"
|
||||
Get-Content "02_MARS_835_*.sql" | sql "ADMIN/Cloudpass#34@ggmichalski_high"
|
||||
# ... etc
|
||||
```
|
||||
|
||||
## Verification
|
||||
```sql
|
||||
-- Verify package versions
|
||||
SELECT PACKAGE_NAME.GET_VERSION() FROM DUAL;
|
||||
|
||||
-- Check for errors (ADMIN user checks specific schema)
|
||||
SELECT * FROM ALL_ERRORS
|
||||
WHERE OWNER = 'CT_MRDS' -- Replace with target schema
|
||||
AND NAME = 'PACKAGE_NAME';
|
||||
|
||||
-- Verify no untracked changes
|
||||
SELECT ENV_MANAGER.CHECK_PACKAGE_CHANGES('CT_MRDS', 'PACKAGE_NAME') FROM DUAL;
|
||||
```
|
||||
|
||||
## Rollback
|
||||
```powershell
|
||||
# IMPORTANT: Execute as ADMIN user
|
||||
Get-Content "MARS_Packages/REL01_POST_DEACTIVATION/MARS-835/rollback_mars835.sql" | sql "ADMIN/<password>@<tns_alias>"
|
||||
|
||||
```

**NOTE**: Rollback for data exports is **NOT RECOMMENDED**, as it permanently deletes the exported files from the OCI buckets. Only use rollback if the export failed and needs to be restarted.
|
||||
|
||||
## Expected Changes
|
||||
|
||||
### Data Export Summary
|
||||
**6 CSDB tables exported from OU_CSDB schema:**
|
||||
|
||||
**Group 1: Split DATA + HIST (Time Critical)**
|
||||
1. **DEBT** - Last 6 months → DATA, Older → HIST
|
||||
2. **DEBT_DAILY** - Last 6 months → DATA, Older → HIST
|
||||
|
||||
**Group 2: HIST Only (Weekend Bulk)**
|
||||
3. **INSTR_RAT_FULL** - All data → HIST
|
||||
4. **INSTR_DESC_FULL** - All data → HIST
|
||||
5. **ISSUER_RAT_FULL** - All data → HIST
|
||||
6. **ISSUER_DESC_FULL** - All data → HIST
|
||||
|
||||
### Bucket Destinations (DEV environment)
|
||||
- **DATA Bucket**: `mrds_data_dev/ODS/CSDB/` (CSV format)
|
||||
- **HIST Bucket**: `mrds_hist_dev/ARCHIVE/CSDB/` (Parquet with partitioning)
|
||||
|
||||
### Column Mappings
|
||||
- **All tables**: `A_ETL_LOAD_SET_FK` renamed to `A_WORKFLOW_HISTORY_KEY`
|
||||
- **DEBT**: Removed columns: `IDIRDEPOSITORY`, `VA_BONDDURATION`
|
||||
- **DEBT_DAILY**: Removed columns: `STEPID`, `PROGRAMNAME`, `PROGRAMCEILING`, `PROGRAMSTATUS`, `ISSUERNACE21SECTOR`, `INSTRUMENTQUOTATIONBASIS`
|
||||
|
||||
## Testing
|
||||
|
||||
### Post-Export Verification
|
||||
|
||||
1. **Verify CSV files in DATA bucket** (DEBT, DEBT_DAILY - last 6 months):
|
||||
```sql
|
||||
-- Check exported files
|
||||
SELECT object_name, bytes
|
||||
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
||||
credential_name => 'DEF_CRED_ARN',
|
||||
location_uri => 'https://objectstorage.region.oraclecloud.com/n/namespace/b/mrds_data_dev/o/ODS/CSDB/'
|
||||
)) WHERE object_name LIKE '%CSDB_DEBT%';
|
||||
```
|
||||
|
||||
2. **Verify Parquet files in HIST bucket** (all 6 tables):
|
||||
```sql
|
||||
-- Check archived files with Hive partitioning
|
||||
SELECT object_name, bytes
|
||||
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
|
||||
credential_name => 'DEF_CRED_ARN',
|
||||
location_uri => 'https://objectstorage.region.oraclecloud.com/n/namespace/b/mrds_hist_dev/o/ARCHIVE/CSDB/'
|
||||
)) WHERE object_name LIKE '%PARTITION_YEAR=%';
|
||||
```
|
||||
|
||||
3. **Validate row counts match source tables**:
|
||||
```sql
|
||||
-- Compare counts between source and exported data
|
||||
SELECT COUNT(*) FROM OU_CSDB.DEBT;
|
||||
SELECT COUNT(*) FROM ODS.CSDB_DEBT_ODS; -- External table pointing to DATA
|
||||
SELECT COUNT(*) FROM ODS.CSDB_DEBT_ARCHIVE; -- External table pointing to HIST
|
||||
```
|
||||
|
||||
4. **Verify column mappings**:
|
||||
```sql
|
||||
-- Check A_WORKFLOW_HISTORY_KEY exists in exported data
|
||||
SELECT A_WORKFLOW_HISTORY_KEY, COUNT(*)
|
||||
FROM ODS.CSDB_DEBT_ARCHIVE
|
||||
GROUP BY A_WORKFLOW_HISTORY_KEY;
|
||||
```
|
||||
|
||||
## Known Issues
|
||||
|
||||
### Timing Constraints
|
||||
- **DATA exports (DEBT, DEBT_DAILY)**: Must execute during parallel old+new loads phase after Production deployment
|
||||
- **HIST exports (all 6 tables)**: Can run anytime, recommended for weekend bulk execution to avoid interference
|
||||
|
||||
### Environment-Specific Configuration
|
||||
- Bucket names must be adjusted for each environment:
|
||||
- DEV: `mrds_data_dev`, `mrds_hist_dev`
|
||||
- TEST: `mrds_data_test`, `mrds_hist_test`
|
||||
- PROD: `mrds_data_prod`, `mrds_hist_prod`
|
||||
|
||||
### Data Cutoff Date
|
||||
- Export scripts use 6-month cutoff date calculated as `ADD_MONTHS(SYSDATE, -6)`
|
||||
- Verify cutoff aligns with business requirements before execution
|
||||
|
||||
### One-Time Execution
|
||||
- This is a **ONE-TIME data migration** package
|
||||
- After successful execution, package should be **deactivated** (moved to REL01_POST_DEACTIVATION)
|
||||
- Do not re-run unless explicitly required for data refresh
|
||||
|
||||
## Related
|
||||
- **JIRA**: MARS-835 - CSDB Data Export to External Tables
|
||||
- **Confluence**: FILE_MANAGER package - MRDS - Technical Team
|
||||
- **Confluence**: Table Setup Guide for FILE PROCESSOR System
|
||||
- **Source Schema**: OU_CSDB (Operational Database)
|
||||
- **Target Schema**: ODS (External Tables)
|
||||
- **Migration Type**: One-time bulk export (deactivated post-execution)
|
||||
|
||||
---
|
||||
|
||||
**Author:** Grzegorz Michalski
|
||||
**Date:** 2025-12-04
|
||||
**Version:** 1.0.0
|
||||
@@ -0,0 +1,207 @@
|
||||
# MARS-835: Required External Tables for Smart Column Mapping
|
||||
|
||||
## Overview
|
||||
This document lists all external tables required for MARS-835 data exports using DATA_EXPORTER v2.4.0 with Smart Column Mapping feature.
|
||||
|
||||
**Purpose**: Smart Column Mapping ensures CSV files are generated with columns in the EXACT order expected by external tables, preventing NULL values due to Oracle's positional CSV mapping.
|
||||
|
||||
---
|
||||
|
||||
## Required External Tables
|
||||
|
||||
### Group 1: DATA Bucket (CSV Format) - **CRITICAL**
|
||||
|
||||
#### 1. ODS.CSDB_DEBT_DATA_ODS
|
||||
- **Source Table**: OU_CSDB.LEGACY_DEBT
|
||||
- **Format**: CSV
|
||||
- **Bucket**: DATA (mrds_data_dev/ODS/CSDB/CSDB_DEBT/)
|
||||
- **Key Column Mapping**: A_ETL_LOAD_SET_FK → A_WORKFLOW_HISTORY_KEY (position 2 recommended)
|
||||
- **Critical**: Must use Smart Column Mapping to avoid NULL values in A_WORKFLOW_HISTORY_KEY
|
||||
|
||||
#### 2. ODS.CSDB_DEBT_DAILY_DATA_ODS
|
||||
- **Source Table**: OU_CSDB.LEGACY_DEBT_DAILY
|
||||
- **Format**: CSV
|
||||
- **Bucket**: DATA (mrds_data_dev/ODS/CSDB/CSDB_DEBT_DAILY/)
|
||||
- **Key Column Mapping**: A_ETL_LOAD_SET_FK → A_WORKFLOW_HISTORY_KEY (position 2 recommended)
|
||||
- **Critical**: Must use Smart Column Mapping to avoid NULL values in A_WORKFLOW_HISTORY_KEY
|
||||
|
||||
---
|
||||
|
||||
### Group 2: ARCHIVE Bucket (Parquet Format) - **RECOMMENDED**
|
||||
|
||||
#### 3. ODS.CSDB_DEBT_ARCHIVE
|
||||
- **Source Table**: OU_CSDB.LEGACY_DEBT
|
||||
- **Format**: Parquet with Hive partitioning
|
||||
- **Bucket**: ARCHIVE (mrds_hist_dev/ARCHIVE/CSDB/CSDB_DEBT/)
|
||||
- **Key Column Mapping**: A_ETL_LOAD_SET_FK → A_WORKFLOW_HISTORY_KEY
|
||||
- **Note**: Parquet uses schema-based mapping (column order less critical but Smart Column Mapping ensures consistency)
|
||||
|
||||
#### 4. ODS.CSDB_DEBT_DAILY_ARCHIVE
|
||||
- **Source Table**: OU_CSDB.LEGACY_DEBT_DAILY
|
||||
- **Format**: Parquet with Hive partitioning
|
||||
- **Bucket**: ARCHIVE (mrds_hist_dev/ARCHIVE/CSDB/CSDB_DEBT_DAILY/)
|
||||
- **Key Column Mapping**: A_ETL_LOAD_SET_FK → A_WORKFLOW_HISTORY_KEY
|
||||
|
||||
#### 5. ODS.CSDB_INSTR_RAT_FULL_ARCHIVE
|
||||
- **Source Table**: OU_CSDB.LEGACY_INSTR_RAT_FULL
|
||||
- **Format**: Parquet with Hive partitioning
|
||||
- **Bucket**: ARCHIVE (mrds_hist_dev/ARCHIVE/CSDB/CSDB_INSTR_RAT_FULL/)
|
||||
- **Key Column Mapping**: A_ETL_LOAD_SET_FK → A_WORKFLOW_HISTORY_KEY
|
||||
|
||||
#### 6. ODS.CSDB_INSTR_DESC_FULL_ARCHIVE
|
||||
- **Source Table**: OU_CSDB.LEGACY_INSTR_DESC_FULL
|
||||
- **Format**: Parquet with Hive partitioning
|
||||
- **Bucket**: ARCHIVE (mrds_hist_dev/ARCHIVE/CSDB/CSDB_INSTR_DESC_FULL/)
|
||||
- **Key Column Mapping**: A_ETL_LOAD_SET_FK → A_WORKFLOW_HISTORY_KEY
|
||||
|
||||
#### 7. ODS.CSDB_ISSUER_RAT_FULL_ARCHIVE
|
||||
- **Source Table**: OU_CSDB.LEGACY_ISSUER_RAT_FULL
|
||||
- **Format**: Parquet with Hive partitioning
|
||||
- **Bucket**: ARCHIVE (mrds_hist_dev/ARCHIVE/CSDB/CSDB_ISSUER_RAT_FULL/)
|
||||
- **Key Column Mapping**: A_ETL_LOAD_SET_FK → A_WORKFLOW_HISTORY_KEY
|
||||
|
||||
#### 8. ODS.CSDB_ISSUER_DESC_FULL_ARCHIVE
|
||||
- **Source Table**: OU_CSDB.LEGACY_ISSUER_DESC_FULL
|
||||
- **Format**: Parquet with Hive partitioning
|
||||
- **Bucket**: ARCHIVE (mrds_hist_dev/ARCHIVE/CSDB/CSDB_ISSUER_DESC_FULL/)
|
||||
- **Key Column Mapping**: A_ETL_LOAD_SET_FK → A_WORKFLOW_HISTORY_KEY
|
||||
|
||||
---
|
||||
|
||||
## External Table Column Order Requirements
|
||||
|
||||
### **CRITICAL for CSV Tables** (DATA bucket):
|
||||
|
||||
All CSV external tables MUST have **A_WORKFLOW_HISTORY_KEY at position 2**:
|
||||
|
||||
```
|
||||
Position 1: A_KEY (NUMBER)
|
||||
Position 2: A_WORKFLOW_HISTORY_KEY (NUMBER) ← MUST BE HERE!
|
||||
Position 3+: Other columns in any order
|
||||
```
|
||||
|
||||
**Reason**: Oracle External Tables with CSV format use **positional mapping** (ignore header row). If source table has A_ETL_LOAD_SET_FK at position 72, but CSV puts it at position 72 while external table expects A_WORKFLOW_HISTORY_KEY at position 2, the external table will try to read position 2 (which might be a DATE column) as NUMBER → conversion fails → NULL value.
|
||||
|
||||
**Solution**: Smart Column Mapping (v2.4.0) generates CSV columns in EXTERNAL TABLE order, ensuring position 2 has the correct NUMBER value.
|
||||
|
||||
### **OPTIONAL for Parquet Tables** (ARCHIVE bucket):
|
||||
|
||||
Parquet format uses **schema-based mapping** (column names). Column order doesn't matter, but Smart Column Mapping provides consistency.
|
||||
|
||||
---
|
||||
|
||||
## Creation Script Example
|
||||
|
||||
### CSV External Table (CRITICAL - Correct Column Order)
|
||||
|
||||
```sql
|
||||
-- Example: ODS.CSDB_DEBT_DATA_ODS
|
||||
-- IMPORTANT: A_WORKFLOW_HISTORY_KEY must be at position 2!
|
||||
|
||||
BEGIN
|
||||
ODS.FILE_MANAGER_ODS.CREATE_EXTERNAL_TABLE(
|
||||
pTableName => 'CSDB_DEBT_DATA_ODS',
|
||||
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_DEBT_TEMPLATE',
|
||||
pPrefix => 'ODS/CSDB/CSDB_DEBT',
|
||||
pBucketUri => CT_MRDS.ENV_MANAGER.gvDataBucketUri,
|
||||
pFormat => 'CSV' -- Uses positional mapping!
|
||||
);
|
||||
END;
|
||||
/
|
||||
|
||||
-- Verify column order (A_WORKFLOW_HISTORY_KEY should be position 2)
|
||||
SELECT column_id, column_name, data_type
|
||||
FROM all_tab_columns
|
||||
WHERE table_name = 'CSDB_DEBT_DATA_ODS'
|
||||
AND owner = 'ODS'
|
||||
ORDER BY column_id;
|
||||
```
|
||||
|
||||
### Parquet External Table (Optional Column Order)
|
||||
|
||||
```sql
|
||||
-- Example: ODS.CSDB_DEBT_ARCHIVE
|
||||
-- Column order flexible (schema-based mapping)
|
||||
|
||||
BEGIN
|
||||
ODS.FILE_MANAGER_ODS.CREATE_EXTERNAL_TABLE(
|
||||
pTableName => 'CSDB_DEBT_ARCHIVE',
|
||||
pTemplateTableName => 'CT_ET_TEMPLATES.CSDB_DEBT_TEMPLATE',
|
||||
pPrefix => 'ARCHIVE/CSDB/CSDB_DEBT',
|
||||
pBucketUri => CT_MRDS.ENV_MANAGER.gvArchiveBucketUri,
|
||||
pFormat => 'PARQUET' -- Uses schema-based mapping
|
||||
);
|
||||
END;
|
||||
/
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Template Tables Required
|
||||
|
||||
All external tables require corresponding template tables in CT_ET_TEMPLATES schema:
|
||||
|
||||
- `CT_ET_TEMPLATES.CSDB_DEBT_TEMPLATE`
|
||||
- `CT_ET_TEMPLATES.CSDB_DEBT_DAILY_TEMPLATE`
|
||||
- `CT_ET_TEMPLATES.CSDB_INSTR_RAT_FULL_TEMPLATE`
|
||||
- `CT_ET_TEMPLATES.CSDB_INSTR_DESC_FULL_TEMPLATE`
|
||||
- `CT_ET_TEMPLATES.CSDB_ISSUER_RAT_FULL_TEMPLATE`
|
||||
- `CT_ET_TEMPLATES.CSDB_ISSUER_DESC_FULL_TEMPLATE`
|
||||
|
||||
**Note**: Template tables must be created by ADMIN or CT_ET_TEMPLATES user (MRDS_LOADER cannot create them).
|
||||
|
||||
---
|
||||
|
||||
## Verification Checklist
|
||||
|
||||
Before running MARS-835 exports:
|
||||
|
||||
- [ ] All 8 external tables exist in ODS schema
|
||||
- [ ] CSV tables (DATA bucket) have A_WORKFLOW_HISTORY_KEY at position 2
|
||||
- [ ] Template tables exist in CT_ET_TEMPLATES schema
|
||||
- [ ] MRDS_LOADER has EXECUTE privilege on ODS.FILE_MANAGER_ODS
|
||||
- [ ] ODS schema has access to CT_MRDS.ENV_MANAGER for logging
|
||||
- [ ] DATA_EXPORTER v2.4.0 deployed with Smart Column Mapping feature
|
||||
|
||||
---
|
||||
|
||||
## Testing Verification
|
||||
|
||||
After export, verify A_WORKFLOW_HISTORY_KEY is not NULL:
|
||||
|
||||
```sql
|
||||
-- CSV tables (should be 100% populated)
|
||||
SELECT 'CSDB_DEBT_DATA_ODS' AS TABLE_NAME,
|
||||
COUNT(*) AS TOTAL_ROWS,
|
||||
COUNT(A_WORKFLOW_HISTORY_KEY) AS NON_NULL_COUNT,
|
||||
ROUND(COUNT(A_WORKFLOW_HISTORY_KEY) * 100.0 / NULLIF(COUNT(*), 0), 2) AS SUCCESS_RATE_PCT
|
||||
FROM ODS.CSDB_DEBT_DATA_ODS;
|
||||
|
||||
SELECT 'CSDB_DEBT_DAILY_DATA_ODS' AS TABLE_NAME,
|
||||
COUNT(*) AS TOTAL_ROWS,
|
||||
COUNT(A_WORKFLOW_HISTORY_KEY) AS NON_NULL_COUNT,
|
||||
ROUND(COUNT(A_WORKFLOW_HISTORY_KEY) * 100.0 / NULLIF(COUNT(*), 0), 2) AS SUCCESS_RATE_PCT
|
||||
FROM ODS.CSDB_DEBT_DAILY_DATA_ODS;
|
||||
|
||||
-- Parquet tables (should also be 100% populated)
|
||||
SELECT 'CSDB_DEBT_ARCHIVE' AS TABLE_NAME,
|
||||
COUNT(*) AS TOTAL_ROWS,
|
||||
COUNT(A_WORKFLOW_HISTORY_KEY) AS NON_NULL_COUNT,
|
||||
ROUND(COUNT(A_WORKFLOW_HISTORY_KEY) * 100.0 / NULLIF(COUNT(*), 0), 2) AS SUCCESS_RATE_PCT
|
||||
FROM ODS.CSDB_DEBT_ARCHIVE;
|
||||
```
|
||||
|
||||
**Expected Result**: SUCCESS_RATE_PCT = 100.00 for all tables
|
||||
|
||||
---
|
||||
|
||||
## Related Documentation
|
||||
|
||||
- [DATA_EXPORTER v2.4.0 Smart Column Mapping Examples](../MARS-835-PREHOOK/current_version/v2.3.0/DATA_EXPORTER_v2.4.0_Smart_Column_Mapping_Examples.sql)
|
||||
- [Oracle External Tables Column Order Issue](../../confluence/additions/Oracle_External_Tables_Column_Order_Issue.md)
|
||||
- [MARS-835 README](README.md)
|
||||
|
||||
---
|
||||
|
||||
**Last Updated**: 2026-01-09
|
||||
**Author**: GitHub Copilot (MARS-835 Update)
|
||||
@@ -0,0 +1,104 @@
|
||||
-- ===================================================================
-- MARS-835 INSTALL SCRIPT: CSDB Data Export to External Tables
-- ===================================================================
-- Purpose: One-time bulk export of 6 CSDB tables from OU_CSDB schema
--          to OCI buckets (DATA/CSV and HIST/Parquet formats).
--          Uses DATA_EXPORTER v2.4.0 with per-column date format
--          handling for data migration.
-- Author:  Grzegorz Michalski
-- Date:    2025-12-17
-- Version: 1.0.0
-- ===================================================================

-- Dynamic spool file generation (using SYS_CONTEXT - no DBA privileges required)
-- Log files are automatically created in log/ subdirectory
-- IMPORTANT: Ensure log/ directory exists before SPOOL (use host mkdir)
-- NOTE(review): "2>nul" is Windows cmd syntax; on a Unix shell this creates a
-- stray file literally named "nul" -- confirm the intended client platform.
host mkdir log 2>nul

-- BUGFIX: the bind variable was VARCHAR2(100). SYS_CONTEXT('USERENV','CON_NAME')
-- can return up to 128 bytes, and the fixed prefix ('log/INSTALL_MARS_835_'),
-- separator, timestamp (15 chars) and '.log' already use ~41 characters, so the
-- assignment below could raise ORA-06502 on long container names. Sized to 255.
var filename VARCHAR2(255)
BEGIN
    -- Build a per-run log name: log/INSTALL_MARS_835_<container>_<timestamp>.log
    :filename := 'log/INSTALL_MARS_835_' || SYS_CONTEXT('USERENV', 'CON_NAME') || '_' || TO_CHAR(SYSDATE,'YYYYMMDD_HH24MISS') || '.log';
END;
/
-- Transfer the bind variable into a substitution variable so SPOOL can use it
column filename new_value _filename
select :filename filename from dual;
spool &_filename

SET ECHO OFF
SET TIMING ON
SET SERVEROUTPUT ON SIZE UNLIMITED
SET PAUSE OFF

-- Set current schema context (optional - use when modifying packages in specific schema)
-- ALTER SESSION SET CURRENT_SCHEMA = CT_MRDS;

PROMPT =========================================================================
PROMPT MARS-835: CSDB Data Export to External Tables (One-Time Migration)
PROMPT =========================================================================
PROMPT
PROMPT This script will export 6 CSDB tables to OCI buckets:
PROMPT
PROMPT GROUP 1 - Split DATA + HIST (Time Critical):
PROMPT   - DEBT: Last 6 months to DATA, older to HIST
PROMPT   - DEBT_DAILY: Last 6 months to DATA, older to HIST
PROMPT
PROMPT GROUP 2 - HIST Only (Weekend Bulk):
PROMPT   - INSTR_RAT_FULL, INSTR_DESC_FULL
PROMPT   - ISSUER_RAT_FULL, ISSUER_DESC_FULL
PROMPT
PROMPT Column transformations:
PROMPT   - A_ETL_LOAD_SET_FK renamed to A_WORKFLOW_HISTORY_KEY (all tables)
PROMPT   - Legacy columns removed from DEBT and DEBT_DAILY
PROMPT
PROMPT Expected Duration: 30-60 minutes (depends on data volume)
PROMPT =========================================================================

-- Confirm installation with user; abort the whole session on any error below
ACCEPT continue CHAR PROMPT 'Type YES to continue with installation, or Ctrl+C to abort: '
WHENEVER SQLERROR EXIT SQL.SQLCODE
BEGIN
    -- Proceed only when the user typed exactly YES (case-insensitive, trimmed)
    IF '&continue' IS NULL OR TRIM('&continue') IS NULL OR UPPER(TRIM('&continue')) != 'YES' THEN
        RAISE_APPLICATION_ERROR(-20001, 'Installation aborted by user');
    END IF;
END;
/
WHENEVER SQLERROR CONTINUE

PROMPT
PROMPT =========================================================================
PROMPT Pre-Check: Verify existing files in DATA and HIST buckets
PROMPT =========================================================================
@@00_MARS_835_pre_check_existing_files.sql

PROMPT
PROMPT =========================================================================
PROMPT Step 1: Export Group 1 - Split DATA + HIST (DEBT, DEBT_DAILY)
PROMPT =========================================================================
@@01_MARS_835_install_step1.sql

PROMPT
PROMPT =========================================================================
PROMPT Step 2: Export Group 2 - HIST Only (4 tables)
PROMPT =========================================================================
@@02_MARS_835_install_step2.sql

PROMPT
PROMPT =========================================================================
PROMPT Step 3: Verify Exports (File Presence Check)
PROMPT =========================================================================
@@03_MARS_835_verify_exports.sql

PROMPT
PROMPT =========================================================================
PROMPT Step 4: Verify Record Counts (Source vs Archive)
PROMPT =========================================================================
@@04_MARS_835_verify_record_counts.sql

PROMPT
PROMPT =========================================================================
PROMPT MARS-835 Installation - COMPLETED
PROMPT =========================================================================
PROMPT Check the log file for complete installation details.
PROMPT =========================================================================

spool off

quit;
@@ -0,0 +1,73 @@
|
||||
-- ===================================================================
-- MARS-835 ROLLBACK SCRIPT: CSDB Data Export Rollback
-- ===================================================================
-- Purpose: Rollback MARS-835 - Delete exported CSV/Parquet files from OCI buckets
-- WARNING: This will DELETE all exported data files!
-- Author:  Grzegorz Michalski
-- Date:    2025-12-17
-- ===================================================================

-- Dynamic spool file generation (using SYS_CONTEXT - no DBA privileges required)
-- IMPORTANT: Ensure log/ directory exists before SPOOL (use host mkdir)
-- NOTE(review): "2>nul" is Windows cmd syntax; on a Unix shell this creates a
-- stray file literally named "nul" -- confirm the intended client platform.
host mkdir log 2>nul

-- BUGFIX: the bind variable was VARCHAR2(100). SYS_CONTEXT('USERENV','CON_NAME')
-- can return up to 128 bytes, and the fixed prefix ('log/ROLLBACK_MARS_835_'),
-- separator, timestamp (15 chars) and '.log' already use ~42 characters, so the
-- assignment below could raise ORA-06502 on long container names. Sized to 255.
var filename VARCHAR2(255)
BEGIN
    -- Build a per-run log name: log/ROLLBACK_MARS_835_<container>_<timestamp>.log
    :filename := 'log/ROLLBACK_MARS_835_' || SYS_CONTEXT('USERENV', 'CON_NAME') || '_' || TO_CHAR(SYSDATE,'YYYYMMDD_HH24MISS') || '.log';
END;
/
-- Transfer the bind variable into a substitution variable so SPOOL can use it
column filename new_value _filename
select :filename filename from dual;
spool &_filename

SET ECHO OFF
SET TIMING ON
SET SERVEROUTPUT ON SIZE UNLIMITED
SET PAUSE OFF

PROMPT =========================================================================
PROMPT MARS-835: Rollback CSDB Data Export
PROMPT =========================================================================
PROMPT WARNING: This will DELETE exported CSV and Parquet files from OCI buckets!
PROMPT   - DATA bucket: mrds_data_dev/ODS/CSDB/
PROMPT   - HIST bucket: mrds_hist_dev/ARCHIVE/CSDB/
PROMPT
PROMPT Only proceed if export failed and needs to be restarted!
PROMPT =========================================================================

-- Confirm rollback with user; abort the whole session on any error below
ACCEPT continue CHAR PROMPT 'Type YES to continue with rollback, or Ctrl+C to abort: '
WHENEVER SQLERROR EXIT SQL.SQLCODE
BEGIN
    -- Proceed only when the user typed exactly YES (case-insensitive, trimmed)
    IF '&continue' IS NULL OR TRIM('&continue') IS NULL OR UPPER(TRIM('&continue')) != 'YES' THEN
        RAISE_APPLICATION_ERROR(-20001, 'Rollback aborted by user');
    END IF;
END;
/
WHENEVER SQLERROR CONTINUE

-- Rollback steps run in reverse order of installation (Group 2 first, then Group 1)
PROMPT
PROMPT =========================================================================
PROMPT Step 1: Delete Group 2 Exported Files (HIST only - 4 tables)
PROMPT =========================================================================
@@92_MARS_835_rollback_step2.sql

PROMPT
PROMPT =========================================================================
PROMPT Step 2: Delete Group 1 Exported Files (DATA + HIST - 2 tables)
PROMPT =========================================================================
@@91_MARS_835_rollback_step1.sql

PROMPT
PROMPT =========================================================================
PROMPT Step 3: Verify Rollback Completed
PROMPT =========================================================================
@@99_MARS_835_verify_rollback.sql

PROMPT
PROMPT =========================================================================
PROMPT MARS-835 Rollback - COMPLETED
PROMPT =========================================================================

spool off

quit;
@@ -0,0 +1,92 @@
|
||||
-- ===================================================================
-- Simple Package Version Tracking Script
-- ===================================================================
-- Purpose: Track specified Oracle package versions
-- Author: Grzegorz Michalski
-- Date: 2025-12-04
-- Version: 3.1.0 - List-Based Edition
--
-- USAGE:
-- 1. Edit package list below (add/remove packages as needed)
-- 2. Include in your install/rollback script: @@track_package_versions.sql
-- ===================================================================

SET SERVEROUTPUT ON;

DECLARE
    -- One successfully tracked package: owner, name, and reported version
    TYPE t_package_rec IS RECORD (
        owner   VARCHAR2(50),
        name    VARCHAR2(50),
        version VARCHAR2(50)
    );
    TYPE t_packages IS TABLE OF t_package_rec;
    TYPE t_string_array IS TABLE OF VARCHAR2(100);

    -- ===================================================================
    -- PACKAGE LIST - Edit this array to specify packages to track
    -- ===================================================================
    -- Add or remove entries as needed for your MARS issue
    -- Format: 'SCHEMA.PACKAGE_NAME'
    -- ===================================================================
    vPackageList t_string_array := t_string_array(
        'CT_MRDS.FILE_MANAGER',
        'ODS.FILE_MANAGER_ODS'
    );
    -- ===================================================================

    vPackages    t_packages := t_packages();   -- only packages tracked successfully
    vVersion     VARCHAR2(50);
    vCount       NUMBER := 0;                  -- count of successful trackings
    vOwner       VARCHAR2(50);
    vPackageName VARCHAR2(50);
    vDotPos      NUMBER;
BEGIN
    DBMS_OUTPUT.PUT_LINE('========================================');
    DBMS_OUTPUT.PUT_LINE('Package Version Tracking');
    DBMS_OUTPUT.PUT_LINE('========================================');

    -- Process each package in the list; entries without a '.' are skipped
    FOR i IN 1..vPackageList.COUNT LOOP
        vDotPos := INSTR(vPackageList(i), '.');
        IF vDotPos > 0 THEN
            vOwner       := SUBSTR(vPackageList(i), 1, vDotPos - 1);
            vPackageName := SUBSTR(vPackageList(i), vDotPos + 1);

            BEGIN
                -- NOTE(review): vPackageList is a hard-coded constant above; if it
                -- is ever populated from external input, validate each entry with
                -- DBMS_ASSERT.SQL_OBJECT_NAME before concatenating it into SQL.
                EXECUTE IMMEDIATE 'SELECT ' || vPackageList(i) || '.GET_VERSION() FROM DUAL'
                    INTO vVersion;

                vPackages.EXTEND;
                vPackages(vPackages.COUNT).owner   := vOwner;
                vPackages(vPackages.COUNT).name    := vPackageName;
                vPackages(vPackages.COUNT).version := vVersion;

                -- Persist the version in the central tracking table
                CT_MRDS.ENV_MANAGER.TRACK_PACKAGE_VERSION(
                    pPackageOwner     => vOwner,
                    pPackageName      => vPackageName,
                    pPackageVersion   => vVersion,
                    pPackageBuildDate => TO_CHAR(SYSDATE, 'YYYY-MM-DD HH24:MI:SS'),
                    pPackageAuthor    => 'Grzegorz Michalski'
                );
                vCount := vCount + 1;
            EXCEPTION
                WHEN OTHERS THEN
                    -- Best-effort: report this package's failure, continue with next
                    DBMS_OUTPUT.PUT_LINE('Error tracking ' || vPackageList(i) || ': ' || SQLERRM);
            END;
        END IF;
    END LOOP;

    -- Display results
    IF vPackages.COUNT > 0 THEN
        -- BUGFIX: report against the full configured list. vPackages only grows
        -- on success, so the previous 'vCount of vPackages.COUNT' always printed
        -- "n of n" and hid partial failures.
        DBMS_OUTPUT.PUT_LINE('Packages tracked: ' || vCount || ' of ' || vPackageList.COUNT);
        FOR i IN 1..vPackages.COUNT LOOP
            DBMS_OUTPUT.PUT_LINE('  ' || vPackages(i).owner || '.' || vPackages(i).name ||
                                 ' (v' || vPackages(i).version || ')');
        END LOOP;
    ELSE
        DBMS_OUTPUT.PUT_LINE('No packages found in list');
    END IF;

    DBMS_OUTPUT.PUT_LINE('========================================');
END;
/
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user