pRegisterExport

This commit is contained in:
Grzegorz Michalski
2026-02-11 09:53:53 +01:00
parent 3eb39091de
commit 96e3e2f845
2 changed files with 156 additions and 142 deletions

View File

@@ -1004,9 +1004,6 @@ AS
* Allows specifying custom column list or uses T.* if pColumnList is NULL.
* Validates that all columns in pColumnList exist in the target table.
* Automatically adds 'T.' prefix to column names in pColumnList.
* When pRegisterExport=TRUE, successfully exported files are registered in:
* - CT_MRDS.A_WORKFLOW_HISTORY (one record per YEAR/MONTH with export timestamp)
* - CT_MRDS.A_SOURCE_FILE_RECEIVED (tracks file location and partition info)
* @example
* begin
* DATA_EXPORTER.EXPORT_TABLE_DATA_TO_CSV_BY_DATE(
@@ -1018,8 +1015,7 @@ AS
* pFileName => 'my_export.csv',
* pColumnList => 'COLUMN1, COLUMN2, COLUMN3', -- Optional
* pMinDate => DATE '2024-01-01',
* pMaxDate => SYSDATE,
* pRegisterExport => TRUE -- Registers exports to tracking tables
* pMaxDate => SYSDATE
* );
* end;
**/
@@ -1050,10 +1046,16 @@ AS
vBucketUri VARCHAR2(4000);
vCurrentCol VARCHAR2(128);
vPartitions partition_tab;
vWorkflowHistoryKey NUMBER;
vSourceFileReceivedKey NUMBER;
vFileName VARCHAR2(1000);
vFileUri VARCHAR2(4000);
-- Variables for A_SOURCE_FILE_CONFIG lookup
vSourceKey VARCHAR2(100);
vTableId VARCHAR2(200);
vConfigKey NUMBER := -1;
vSlashPos1 NUMBER;
vSlashPos2 NUMBER;
vFileUri VARCHAR2(4000);
BEGIN
vParameters := ENV_MANAGER.FORMAT_PARAMETERS(SYS.ODCIVARCHAR2LIST( 'pSchemaName => '''||nvl(pSchemaName, 'NULL')||''''
@@ -1068,7 +1070,7 @@ AS
,'pParallelDegree => '''||nvl(TO_CHAR(pParallelDegree), 'NULL')||''''
,'pTemplateTableName => '''||nvl(pTemplateTableName, 'NULL')||''''
,'pMaxFileSize => '''||nvl(TO_CHAR(pMaxFileSize), 'NULL')||''''
,'pRegisterExport => '''||CASE WHEN pRegisterExport THEN 'TRUE' ELSE 'FALSE' END||''''
,'pRegisterExport => '''||CASE WHEN pRegisterExport THEN 'TRUE' ELSE 'FALSE' END||''''
,'pCredentialName => '''||nvl(pCredentialName, 'NULL')||''''
));
ENV_MANAGER.LOG_PROCESS_EVENT('Start','INFO', vParameters);
@@ -1145,58 +1147,6 @@ AS
pMaxFileSize => pMaxFileSize,
pParameters => vParameters
);
-- Register export if requested
IF pRegisterExport THEN
-- Construct filename and URI for this partition
vFileName := NVL(vFileBaseName, UPPER(REPLACE(vTableName, vSchemaName || '.', ''))) || '_' || vPartitions(i).year || vPartitions(i).month || '.csv';
vFileUri := vBucketUri || CASE WHEN pFolderName IS NOT NULL THEN pFolderName || '/' ELSE '' END || sanitizeFilename(vFileName);
-- Create A_WORKFLOW_HISTORY record for this export (one per year/month)
vWorkflowHistoryKey := CT_MRDS.A_WORKFLOW_HISTORY_KEY_SEQ.NEXTVAL;
INSERT INTO CT_MRDS.A_WORKFLOW_HISTORY (
A_WORKFLOW_HISTORY_KEY,
SERVICE_NAME,
ORCHESTRATION_RUN_ID,
WORKFLOW_NAME,
WORKFLOW_START,
WORKFLOW_END,
WORKFLOW_SUCCESSFUL
) VALUES (
vWorkflowHistoryKey,
'DATA_EXPORTER',
'CSV_EXPORT_' || TO_CHAR(SYSTIMESTAMP, 'YYYYMMDDHH24MISSFF'),
'DATA_EXPORT_' || UPPER(REPLACE(vTableName, vSchemaName || '.', '')) || '_' || vPartitions(i).year || vPartitions(i).month,
SYSTIMESTAMP,
SYSTIMESTAMP,
'Y'
);
-- Create A_SOURCE_FILE_RECEIVED record for this export
vSourceFileReceivedKey := CT_MRDS.A_SOURCE_FILE_RECEIVED_KEY_SEQ.NEXTVAL;
INSERT INTO CT_MRDS.A_SOURCE_FILE_RECEIVED (
A_SOURCE_FILE_RECEIVED_KEY,
A_SOURCE_FILE_CONFIG_KEY,
SOURCE_FILE_NAME,
RECEPTION_DATE,
PROCESSING_STATUS,
PARTITION_YEAR,
PARTITION_MONTH,
ARCH_FILE_NAME
) VALUES (
vSourceFileReceivedKey,
-1, -- Special marker for exported files (no config)
vFileUri,
SYSDATE,
'ARCHIVED',
vPartitions(i).year,
vPartitions(i).month,
vFileName
);
COMMIT;
ENV_MANAGER.LOG_PROCESS_EVENT('Registered export: WorkflowKey=' || vWorkflowHistoryKey || ', FileReceivedKey=' || vSourceFileReceivedKey || ', File=' || vFileName, 'INFO', vParameters);
END IF;
END LOOP;
-- Parallel processing (parallel degree > 1)
@@ -1291,64 +1241,6 @@ AS
-- Clean up task
DBMS_PARALLEL_EXECUTE.DROP_TASK(task_name => vTaskName);
-- Register exports if requested (after successful parallel processing)
IF pRegisterExport THEN
ENV_MANAGER.LOG_PROCESS_EVENT('Registering ' || vPartitions.COUNT || ' parallel exports to A_WORKFLOW_HISTORY and A_SOURCE_FILE_RECEIVED', 'INFO', vParameters);
FOR i IN 1 .. vPartitions.COUNT LOOP
-- Construct filename and URI for this partition
vFileName := NVL(vFileBaseName, UPPER(REPLACE(vTableName, vSchemaName || '.', ''))) || '_' || vPartitions(i).year || vPartitions(i).month || '.csv';
vFileUri := vBucketUri || CASE WHEN pFolderName IS NOT NULL THEN pFolderName || '/' ELSE '' END || sanitizeFilename(vFileName);
-- Create A_WORKFLOW_HISTORY record for this export (one per year/month)
vWorkflowHistoryKey := CT_MRDS.A_WORKFLOW_HISTORY_KEY_SEQ.NEXTVAL;
INSERT INTO CT_MRDS.A_WORKFLOW_HISTORY (
A_WORKFLOW_HISTORY_KEY,
SERVICE_NAME,
ORCHESTRATION_RUN_ID,
WORKFLOW_NAME,
WORKFLOW_START,
WORKFLOW_END,
WORKFLOW_SUCCESSFUL
) VALUES (
vWorkflowHistoryKey,
'DATA_EXPORTER',
'CSV_EXPORT_' || TO_CHAR(SYSTIMESTAMP, 'YYYYMMDDHH24MISSFF'),
'DATA_EXPORT_' || UPPER(REPLACE(vTableName, vSchemaName || '.', '')) || '_' || vPartitions(i).year || vPartitions(i).month,
SYSTIMESTAMP,
SYSTIMESTAMP,
'Y'
);
-- Create A_SOURCE_FILE_RECEIVED record for this export
vSourceFileReceivedKey := CT_MRDS.A_SOURCE_FILE_RECEIVED_KEY_SEQ.NEXTVAL;
INSERT INTO CT_MRDS.A_SOURCE_FILE_RECEIVED (
A_SOURCE_FILE_RECEIVED_KEY,
A_SOURCE_FILE_CONFIG_KEY,
SOURCE_FILE_NAME,
RECEPTION_DATE,
PROCESSING_STATUS,
PARTITION_YEAR,
PARTITION_MONTH,
ARCH_FILE_NAME
) VALUES (
vSourceFileReceivedKey,
-1, -- Special marker for exported files (no config)
vFileUri,
SYSDATE,
'ARCHIVED',
vPartitions(i).year,
vPartitions(i).month,
vFileName
);
ENV_MANAGER.LOG_PROCESS_EVENT('Registered parallel export: WorkflowKey=' || vWorkflowHistoryKey || ', FileReceivedKey=' || vSourceFileReceivedKey || ', File=' || vFileName, 'DEBUG', vParameters);
END LOOP;
COMMIT;
ENV_MANAGER.LOG_PROCESS_EVENT('Successfully registered all ' || vPartitions.COUNT || ' exports', 'INFO', vParameters);
END IF;
-- Clean up chunks for THIS specific task only (session-safe)
-- CRITICAL: Use TASK_NAME filter to avoid deleting chunks from other active CSV sessions
DELETE FROM CT_MRDS.A_PARALLEL_EXPORT_CHUNKS WHERE TASK_NAME = vTaskName;
@@ -1371,6 +1263,130 @@ AS
END IF;
END IF;
-- Register exported files to A_SOURCE_FILE_RECEIVED if requested (after successful export)
IF pRegisterExport THEN
-- Lookup A_SOURCE_FILE_CONFIG_KEY based on pFolderName parsing
-- Format: {BUCKET_AREA}/{SOURCE_KEY}/{TABLE_ID}
-- Example: 'ODS/CSDB/CSDB_DEBT_DAILY' -> SOURCE_KEY='CSDB', TABLE_ID='CSDB_DEBT_DAILY'
-- Parse pFolderName to extract SOURCE_KEY and TABLE_ID
vSlashPos1 := INSTR(pFolderName, '/', 1, 1); -- First '/' position
vSlashPos2 := INSTR(pFolderName, '/', 1, 2); -- Second '/' position
IF vSlashPos1 > 0 AND vSlashPos2 > 0 THEN
-- Extract segment 2 (SOURCE_KEY) and segment 3 (TABLE_ID)
vSourceKey := SUBSTR(pFolderName, vSlashPos1 + 1, vSlashPos2 - vSlashPos1 - 1);
vTableId := SUBSTR(pFolderName, vSlashPos2 + 1);
-- Find configuration based on SOURCE_KEY and TABLE_ID
BEGIN
SELECT A_SOURCE_FILE_CONFIG_KEY
INTO vConfigKey
FROM CT_MRDS.A_SOURCE_FILE_CONFIG
WHERE A_SOURCE_KEY = vSourceKey
AND TABLE_ID = vTableId
AND SOURCE_FILE_TYPE = 'INPUT'
AND ROWNUM = 1;
ENV_MANAGER.LOG_PROCESS_EVENT('Found config key: ' || vConfigKey || ' for SOURCE=' || vSourceKey || ', TABLE=' || vTableId, 'DEBUG', vParameters);
EXCEPTION
WHEN NO_DATA_FOUND THEN
vConfigKey := -1;
ENV_MANAGER.LOG_PROCESS_EVENT('No config found for SOURCE=' || vSourceKey || ', TABLE=' || vTableId || ' - using default (-1)', 'INFO', vParameters);
END;
ELSE
-- Cannot parse folder name - use default
vConfigKey := -1;
ENV_MANAGER.LOG_PROCESS_EVENT('Cannot parse pFolderName: ' || pFolderName || ' - using default (-1)', 'WARNING', vParameters);
END IF;
ENV_MANAGER.LOG_PROCESS_EVENT('Registering ' || vPartitions.COUNT || ' exported files to A_SOURCE_FILE_RECEIVED with config key: ' || vConfigKey, 'INFO', vParameters);
FOR i IN 1 .. vPartitions.COUNT LOOP
-- Construct filename and URI for this partition
vFileName := NVL(vFileBaseName, UPPER(REPLACE(vTableName, vSchemaName || '.', ''))) || '_' || vPartitions(i).year || vPartitions(i).month || '.csv';
vFileUri := vBucketUri || CASE WHEN pFolderName IS NOT NULL THEN pFolderName || '/' ELSE '' END || sanitizeFilename(vFileName);
-- Get file metadata from OCI bucket (CHECKSUM, CREATED, BYTES)
DECLARE
vChecksum VARCHAR2(128);
vCreated TIMESTAMP WITH TIME ZONE;
vBytes NUMBER;
vSanitizedFileName VARCHAR2(1000);
BEGIN
-- Sanitize filename first (PL/SQL function cannot be used directly in SQL)
vSanitizedFileName := sanitizeFilename(vFileName);
SELECT checksum, created, bytes
INTO vChecksum, vCreated, vBytes
FROM TABLE(DBMS_CLOUD.LIST_OBJECTS(
credential_name => pCredentialName,
location_uri => vBucketUri
))
WHERE object_name = CASE WHEN pFolderName IS NOT NULL THEN pFolderName || '/' ELSE '' END || vSanitizedFileName;
-- Create A_SOURCE_FILE_RECEIVED record for this export with metadata
vSourceFileReceivedKey := CT_MRDS.A_SOURCE_FILE_RECEIVED_KEY_SEQ.NEXTVAL;
INSERT INTO CT_MRDS.A_SOURCE_FILE_RECEIVED (
A_SOURCE_FILE_RECEIVED_KEY,
A_SOURCE_FILE_CONFIG_KEY,
SOURCE_FILE_NAME,
CHECKSUM,
CREATED,
BYTES,
RECEPTION_DATE,
PROCESSING_STATUS,
PARTITION_YEAR,
PARTITION_MONTH,
ARCH_FILE_NAME
) VALUES (
vSourceFileReceivedKey,
vConfigKey, -- Config key from A_SOURCE_FILE_CONFIG lookup
vFileUri,
vChecksum,
vCreated,
vBytes,
SYSDATE,
'ARCHIVED',
vPartitions(i).year,
vPartitions(i).month,
vFileName
);
ENV_MANAGER.LOG_PROCESS_EVENT('Registered file: FileReceivedKey=' || vSourceFileReceivedKey || ', File=' || vFileName || ', Size=' || vBytes || ' bytes', 'DEBUG', vParameters);
EXCEPTION
WHEN NO_DATA_FOUND THEN
-- File not found in bucket - log warning but continue
ENV_MANAGER.LOG_PROCESS_EVENT('WARNING: File not found in bucket for metadata lookup: ' || vFileName, 'WARNING', vParameters);
-- Insert without metadata
vSourceFileReceivedKey := CT_MRDS.A_SOURCE_FILE_RECEIVED_KEY_SEQ.NEXTVAL;
INSERT INTO CT_MRDS.A_SOURCE_FILE_RECEIVED (
A_SOURCE_FILE_RECEIVED_KEY,
A_SOURCE_FILE_CONFIG_KEY,
SOURCE_FILE_NAME,
RECEPTION_DATE,
PROCESSING_STATUS,
PARTITION_YEAR,
PARTITION_MONTH,
ARCH_FILE_NAME
) VALUES (
vSourceFileReceivedKey,
vConfigKey, -- Config key from A_SOURCE_FILE_CONFIG lookup
vFileUri,
SYSDATE,
'ARCHIVED',
vPartitions(i).year,
vPartitions(i).month,
vFileName
);
END;
END LOOP;
COMMIT;
ENV_MANAGER.LOG_PROCESS_EVENT('Successfully registered all ' || vPartitions.COUNT || ' files', 'INFO', vParameters);
END IF;
ENV_MANAGER.LOG_PROCESS_EVENT('Export completed successfully for ' || vPartitions.COUNT || ' files', 'INFO', vParameters);
ENV_MANAGER.LOG_PROCESS_EVENT('End','INFO',vParameters);

View File

@@ -8,28 +8,27 @@ AS
* which returns documentation text for confluence page (to Copy-Paste it).
**/
-- Package Version Information (Semantic Versioning: MAJOR.MINOR.PATCH)
PACKAGE_VERSION CONSTANT VARCHAR2(10) := '2.7.0';
PACKAGE_BUILD_DATE CONSTANT VARCHAR2(20) := '2026-02-09 20:00:00';
-- Package Version Information
PACKAGE_VERSION CONSTANT VARCHAR2(10) := '2.7.1';
PACKAGE_BUILD_DATE CONSTANT VARCHAR2(20) := '2026-02-11 10:00:00';
PACKAGE_AUTHOR CONSTANT VARCHAR2(100) := 'Grzegorz Michalski';
cgBL CONSTANT VARCHAR2(2) := CHR(13)||CHR(10);
-- Version History (Latest changes first)
VERSION_HISTORY CONSTANT VARCHAR2(4000) :=
'v2.7.0 (2026-02-09): NEW FEATURE - Added pRegisterExport parameter to EXPORT_TABLE_DATA_TO_CSV_BY_DATE. When TRUE, successfully exported files are registered in A_WORKFLOW_HISTORY (one record per YEAR/MONTH) and A_SOURCE_FILE_RECEIVED tables for tracking and audit purposes.' || cgBL ||
'v2.6.3 (2026-01-28): COMPILATION FIX - Resolved ORA-00904 error in EXPORT_PARTITION_PARALLEL. SQLERRM and DBMS_UTILITY.FORMAT_ERROR_BACKTRACE cannot be used directly in SQL UPDATE statements. Now properly assigned to vgMsgTmp variable before UPDATE.' || cgBL ||
'v2.6.2 (2026-01-28): CRITICAL FIX - Race condition when multiple exports run simultaneously. Changed DELETE to filter by age (>24h) instead of deleting all COMPLETED chunks. Prevents concurrent sessions from deleting each other chunks. Session-safe cleanup with TASK_NAME filtering. Enables true parallel execution of multiple export jobs.' || cgBL ||
'v2.6.1 (2026-01-28): Added DELETE_FAILED_EXPORT_FILE procedure to clean up partial/corrupted files before retry. When partition fails mid-export, partial file is deleted before retry to prevent Oracle from creating _1 suffixed duplicates. Ensures clean retry without orphaned files in OCI bucket.' || cgBL ||
'v2.6.0 (2026-01-28): CRITICAL FIX - Added STATUS tracking to A_PARALLEL_EXPORT_CHUNKS table to prevent data duplication on retry. System now restarts ONLY failed partitions instead of re-exporting all data. Added ERROR_MESSAGE and EXPORT_TIMESTAMP columns for better error handling and monitoring. Prevents duplicate file creation when parallel tasks fail (e.g., 22 partitions with 16 threads, 3 failures no longer duplicates 19 successful exports).' || cgBL ||
'v2.5.0 (2026-01-26): Added recorddelimiter parameter with CRLF (CHR(13)||CHR(10)) for CSV exports to ensure Windows-compatible line endings. Improves cross-platform compatibility when CSV files are opened in Windows applications (Notepad, Excel).' || cgBL ||
'v2.4.0 (2026-01-11): Added pTemplateTableName parameter for per-column date format configuration. Implements dynamic query building with TO_CHAR for each date/timestamp column using FILE_MANAGER.GET_DATE_FORMAT. Supports 3-tier hierarchy: column-specific, template DEFAULT, global fallback. Eliminates single dateformat limitation of DBMS_CLOUD.EXPORT_DATA.' || cgBL ||
'v2.3.0 (2025-12-20): Added parallel partition processing using DBMS_PARALLEL_EXECUTE. New pParallelDegree parameter (1-16, default 1) for EXPORT_TABLE_DATA_BY_DATE and EXPORT_TABLE_DATA_TO_CSV_BY_DATE procedures. Each year/month partition processed in separate thread for improved performance.' || cgBL ||
'v2.2.0 (2025-12-19): DRY refactoring - extracted shared helper functions (sanitizeFilename, VALIDATE_TABLE_AND_COLUMNS, GET_PARTITIONS, EXPORT_SINGLE_PARTITION worker procedure). Reduced code duplication by ~400 lines. Prepared architecture for v2.3.0 parallel processing.' || cgBL ||
'v2.1.1 (2025-12-04): Fixed JOIN column reference A_WORKFLOW_HISTORY_KEY -> A_ETL_LOAD_SET_KEY, added consistent column mapping and dynamic column list to EXPORT_TABLE_DATA procedure, enhanced DEBUG logging for all export operations' || cgBL ||
'v2.1.0 (2025-10-22): Added version tracking and PARTITION_YEAR/PARTITION_MONTH support' || cgBL ||
'v2.0.0 (2025-10-01): Separated export functionality from FILE_MANAGER package';
-- Version History (latest changes first)
VERSION_HISTORY CONSTANT VARCHAR2(4000) :=
'v2.7.1 (2026-02-11): AUTO-LOOKUP A_SOURCE_FILE_CONFIG_KEY - Parse pFolderName (format: BUCKET/SOURCE/TABLE_ID) to automatically find config key from A_SOURCE_FILE_CONFIG. Example: ODS/CSDB/CSDB_DEBT_DAILY extracts SOURCE_KEY=CSDB, TABLE_ID=CSDB_DEBT_DAILY and looks up config. No more hardcoded -1 in A_SOURCE_FILE_RECEIVED.' || CHR(10) ||
'v2.7.0 (2026-02-10): Added pRegisterExport parameter to EXPORT_TABLE_DATA_TO_CSV_BY_DATE. When TRUE, registers each exported CSV file in A_SOURCE_FILE_RECEIVED with metadata (CHECKSUM, CREATED, BYTES) from DBMS_CLOUD.LIST_OBJECTS. Enables file tracking and integrity verification.' || CHR(10) ||
'v2.6.3 (2026-01-28): COMPILATION FIX - Resolved ORA-00904 error in EXPORT_PARTITION_PARALLEL. SQLERRM and DBMS_UTILITY.FORMAT_ERROR_BACKTRACE cannot be used directly in SQL UPDATE statements. Now properly assigned to vgMsgTmp variable before UPDATE.' || CHR(10) ||
'v2.6.2 (2026-01-28): CRITICAL FIX - Race condition when multiple exports run simultaneously. Changed DELETE to filter by age (>24h) instead of deleting all COMPLETED chunks. Prevents concurrent sessions from deleting each other chunks. Session-safe cleanup with TASK_NAME filtering. Enables true parallel execution of multiple export jobs.' || CHR(10) ||
'v2.6.0 (2026-01-28): CRITICAL FIX - Added STATUS tracking to A_PARALLEL_EXPORT_CHUNKS table to prevent data duplication on retry. System now restarts ONLY failed partitions instead of re-exporting all data. Added ERROR_MESSAGE and EXPORT_TIMESTAMP columns for better error handling and monitoring. Prevents duplicate file creation when parallel tasks fail (e.g., 22 partitions with 16 threads, 3 failures no longer duplicates 19 successful exports).' || CHR(10) ||
'v2.5.0 (2026-01-26): Added recorddelimiter parameter with CRLF (CHR(13)||CHR(10)) for CSV exports to ensure Windows-compatible line endings. Improves cross-platform compatibility when CSV files are opened in Windows applications (Notepad, Excel).' || CHR(10) ||
'v2.4.0 (2026-01-11): Added pTemplateTableName parameter for per-column date format configuration. Implements dynamic query building with TO_CHAR for each date/timestamp column using FILE_MANAGER.GET_DATE_FORMAT. Supports 3-tier hierarchy: column-specific, template DEFAULT, global fallback. Eliminates single dateformat limitation of DBMS_CLOUD.EXPORT_DATA.' || CHR(10) ||
'v2.3.0 (2025-12-20): Added parallel partition processing using DBMS_PARALLEL_EXECUTE. New pParallelDegree parameter (1-16, default 1) for EXPORT_TABLE_DATA_BY_DATE and EXPORT_TABLE_DATA_TO_CSV_BY_DATE procedures. Each year/month partition processed in separate thread for improved performance.' || CHR(10) ||
'v2.2.0 (2025-12-19): DRY refactoring - extracted shared helper functions (sanitizeFilename, VALIDATE_TABLE_AND_COLUMNS, GET_PARTITIONS, EXPORT_SINGLE_PARTITION worker procedure). Reduced code duplication by ~400 lines. Prepared architecture for v2.3.0 parallel processing.' || CHR(10) ||
'v2.1.1 (2025-12-04): Fixed JOIN column reference A_WORKFLOW_HISTORY_KEY -> A_ETL_LOAD_SET_KEY, added consistent column mapping and dynamic column list to EXPORT_TABLE_DATA procedure, enhanced DEBUG logging for all export operations' || CHR(10) ||
'v2.1.0 (2025-10-22): Added version tracking and PARTITION_YEAR/PARTITION_MONTH support' || CHR(10) ||
'v2.0.0 (2025-10-01): Separated export functionality from FILE_MANAGER package' || CHR(10);
cgBL CONSTANT VARCHAR2(2) := CHR(13)||CHR(10);
vgMsgTmp VARCHAR2(32000);
---------------------------------------------------------------------------------------------------------------------------
@@ -149,8 +148,7 @@ AS
* Supports parallel partition processing via pParallelDegree parameter (1-16).
* File naming pattern: {pFileName}_YYYYMM.csv or {TABLENAME}_YYYYMM.csv (if pFileName is NULL)
* When pRegisterExport=TRUE, successfully exported files are registered in:
* - CT_MRDS.A_WORKFLOW_HISTORY (one record per YEAR/MONTH with export timestamp)
* - CT_MRDS.A_SOURCE_FILE_RECEIVED (tracks file location and partition info)
* - CT_MRDS.A_SOURCE_FILE_RECEIVED (tracks file location, size, checksum, and metadata)
* @example
* begin
* -- With custom filename
@@ -164,7 +162,7 @@ AS
* pMinDate => DATE '2024-01-01',
* pMaxDate => SYSDATE,
* pParallelDegree => 8, -- Optional, default 1, range 1-16
* pRegisterExport => TRUE -- Optional, default FALSE, registers to A_WORKFLOW_HISTORY and A_SOURCE_FILE_RECEIVED
* pRegisterExport => TRUE -- Optional, default FALSE, registers to A_SOURCE_FILE_RECEIVED
* );
*
* -- With auto-generated filename (based on table name only)
@@ -175,8 +173,8 @@ AS
* pBucketArea => 'ARCHIVE',
* pFolderName => 'exports',
* pMinDate => DATE '2025-09-01',
* pMaxDate => DATE '2025-09-17',
* pRegisterExport => TRUE -- Registers each export to tracking tables
 * pMaxDate => DATE '2025-09-17',
* pRegisterExport => TRUE -- Registers each export to A_SOURCE_FILE_RECEIVED table
* );
* -- This will create files like: AGGREGATED_ALLOTMENT_202509.csv, etc.
* pBucketArea parameter accepts: 'INBOX', 'ODS', 'DATA', 'ARCHIVE'