That's an important clarification. You want to execute the backup and then stop, leaving the metadata dump file safely in your S3 bucket without immediately importing it into a new database.
In that case, you only need the export and upload steps of the previous comprehensive script, plus a final cleanup step to ensure the local files are removed from the instance after they have been safely uploaded to S3.
Here is the refined, comprehensive script designed specifically for backup and storage only. This script is a self-contained unit that you can run on your old Oracle RDS instance.
Important Note: You must replace the placeholder your-s3-bucket with your actual bucket name.
backup_metadata_and_store_to_s3.sql
SET SERVEROUTPUT ON;
-- -----------------------------------------------------------------------------
-- Metadata-only Data Pump export of an Oracle RDS database.  The resulting
-- dump file and log file are uploaded to an S3 bucket and the local copies
-- are removed from the instance afterwards.  No import is performed; this
-- script is for backup and storage purposes only.
--
-- Prerequisites:
--   1. An IAM role with S3 permissions attached to the RDS instance, and the
--      S3_INTEGRATION option enabled in the instance's option group.
--   2. An S3 bucket reachable from the RDS instance.
--   3. Connect as the master user, or a user with execute privileges on
--      DBMS_DATAPUMP, UTL_FILE and the rdsadmin packages.
-- -----------------------------------------------------------------------------
DECLARE
    -- Data Pump job management
    l_dp_handle      NUMBER;                 -- handle returned by DBMS_DATAPUMP.OPEN
    l_job_state      VARCHAR2(30);           -- final state reported by WAIT_FOR_JOB
    l_dumpfile_name  VARCHAR2(200);
    l_logfile_name   VARCHAR2(200);
    l_task_id        VARCHAR2(100);          -- rdsadmin S3 task identifier

    -- Customizable parameters: replace the bucket placeholder before running.
    c_s3_bucket_name CONSTANT VARCHAR2(100) := 'your-s3-bucket';
    c_s3_prefix      CONSTANT VARCHAR2(100) := 'oracle/metadata_backups/';

    ---------------------------------------------------------------------------
    -- upload_to_s3 is asynchronous: it returns a task id immediately while
    -- the copy runs in the background.  Block until the task's log file
    -- (BDUMP/dbtask-<task_id>.log) reports completion, so the local files are
    -- not deleted mid-upload.
    -- NOTE(review): the completion/failure strings below match current RDS
    -- task logs ("finished successfully") -- confirm on your engine version.
    ---------------------------------------------------------------------------
    PROCEDURE wait_for_s3_task(p_task_id IN VARCHAR2) IS
        l_done   BOOLEAN := FALSE;
        l_failed BOOLEAN := FALSE;
    BEGIN
        WHILE NOT l_done LOOP
            FOR r IN (SELECT text
                        FROM TABLE(rdsadmin.rds_file_util.read_text_file(
                                       p_directory => 'BDUMP',
                                       p_filename  => 'dbtask-' || p_task_id || '.log')))
            LOOP
                IF r.text LIKE '%finished successfully%' THEN
                    l_done := TRUE;
                ELSIF r.text LIKE '%ERROR%' THEN
                    l_failed := TRUE;
                    l_done   := TRUE;
                END IF;
            END LOOP;
            IF NOT l_done THEN
                -- DBMS_SESSION.SLEEP requires no extra grant (unlike DBMS_LOCK).
                DBMS_SESSION.SLEEP(10);
            END IF;
        END LOOP;
        IF l_failed THEN
            RAISE_APPLICATION_ERROR(-20001,
                'S3 upload task ' || p_task_id || ' failed; see its BDUMP log.');
        END IF;
    END wait_for_s3_task;
BEGIN
    -- Timestamped names keep repeated runs from overwriting each other.
    l_dumpfile_name := 'metadata_' || TO_CHAR(SYSDATE, 'YYYYMMDD_HH24MI') || '.dmp';
    l_logfile_name  := 'metadata_' || TO_CHAR(SYSDATE, 'YYYYMMDD_HH24MI') || '.log';

    ---------------------------------------------------------------------------
    -- Step 1: Define the Data Pump export job.
    -- DBMS_DATAPUMP has no parameter-file support; options such as
    -- CONTENT=METADATA_ONLY and EXCLUDE=... must be expressed through the
    -- API (DATA_FILTER / METADATA_FILTER) instead.
    ---------------------------------------------------------------------------
    DBMS_OUTPUT.PUT_LINE('Starting Data Pump metadata export...');
    l_dp_handle := DBMS_DATAPUMP.OPEN(
        operation => 'EXPORT',
        job_mode  => 'FULL',
        job_name  => 'METADATA_EXPORT_JOB',
        version   => 'LATEST'
    );
    DBMS_DATAPUMP.ADD_FILE(
        handle    => l_dp_handle,
        filename  => l_dumpfile_name,
        directory => 'DATA_PUMP_DIR',
        filetype  => DBMS_DATAPUMP.KU$_FILE_TYPE_DUMP_FILE
    );
    DBMS_DATAPUMP.ADD_FILE(
        handle    => l_dp_handle,
        filename  => l_logfile_name,
        directory => 'DATA_PUMP_DIR',
        filetype  => DBMS_DATAPUMP.KU$_FILE_TYPE_LOG_FILE
    );

    -- Equivalent of CONTENT=METADATA_ONLY: export no table rows.
    DBMS_DATAPUMP.DATA_FILTER(l_dp_handle, 'INCLUDE_ROWS', 0);

    -- Equivalent of EXCLUDE=STATISTICS.
    DBMS_DATAPUMP.METADATA_FILTER(l_dp_handle, 'EXCLUDE_PATH_EXPR',
                                  'IN (''STATISTICS'')');

    -- Skip Oracle-maintained and option schemas that cannot be re-imported.
    -- NOTE(review): 12c+ full exports already skip most Oracle-maintained
    -- schemas automatically; confirm SCHEMA_EXPR behavior in FULL mode on
    -- your Oracle version.
    DBMS_DATAPUMP.METADATA_FILTER(l_dp_handle, 'SCHEMA_EXPR',
        'NOT IN (' ||
        '''APEX_040200'',''APEX_050000'',''APEX_050100'',''APEX_050200'',' ||
        '''APEX_050300'',''APEX_050400'',''APEX_180100'',''APEX_180200'',' ||
        '''APEX_190100'',''APEX_190200'',''APEX_200100'',''APEX_200200'',' ||
        '''APEX_210100'',''APEX_210200'',''APEX_220100'',''APEX_220200'',' ||
        '''APPQOSSYS'',''AUDSYS'',''CTXSYS'',''DBSNMP'',''DIP'',''DVSYS'',' ||
        '''GGSYS'',''GSMADMIN_INTERNAL'',''MDADM'',''MDSYS'',''OJVMSYS'',' ||
        '''ORACLE_OCM'',''ORDDATA'',''ORDPLUGINS'',''ORDSYS'',''OUTLN'',' ||
        '''PDBADMIN'',''SYS'',''SYSTEM'',''WMSYS'',''XDB'',''XS$NULL'')');

    ---------------------------------------------------------------------------
    -- Step 2: Run the job and wait for it synchronously.  WAIT_FOR_JOB
    -- replaces the detach-and-poll-dba_datapump_jobs pattern and needs no
    -- DBMS_LOCK.SLEEP grant (which RDS master users often lack).
    ---------------------------------------------------------------------------
    DBMS_DATAPUMP.START_JOB(l_dp_handle);
    DBMS_DATAPUMP.WAIT_FOR_JOB(l_dp_handle, l_job_state);
    DBMS_OUTPUT.PUT_LINE('Data Pump export finished with state: ' || l_job_state);
    DBMS_DATAPUMP.DETACH(l_dp_handle);

    ---------------------------------------------------------------------------
    -- Step 3: Upload both files to S3.  upload_to_s3 has no p_file_name
    -- parameter; p_prefix selects which local files to copy, and the
    -- timestamped names make each prefix match exactly one file.
    ---------------------------------------------------------------------------
    DBMS_OUTPUT.PUT_LINE('Uploading backup files to S3 bucket: ' || c_s3_bucket_name);

    l_task_id := rdsadmin.rdsadmin_s3_tasks.upload_to_s3(
        p_bucket_name    => c_s3_bucket_name,
        p_prefix         => l_dumpfile_name,
        p_s3_prefix      => c_s3_prefix,
        p_directory_name => 'DATA_PUMP_DIR'
    );
    DBMS_OUTPUT.PUT_LINE('Dump file upload task ID: ' || l_task_id);
    wait_for_s3_task(l_task_id);

    l_task_id := rdsadmin.rdsadmin_s3_tasks.upload_to_s3(
        p_bucket_name    => c_s3_bucket_name,
        p_prefix         => l_logfile_name,
        p_s3_prefix      => c_s3_prefix,
        p_directory_name => 'DATA_PUMP_DIR'
    );
    DBMS_OUTPUT.PUT_LINE('Log file upload task ID: ' || l_task_id);
    wait_for_s3_task(l_task_id);

    ---------------------------------------------------------------------------
    -- Step 4: Clean up the local files to free RDS storage.  Safe now because
    -- both upload tasks have been confirmed complete.
    ---------------------------------------------------------------------------
    DBMS_OUTPUT.PUT_LINE('Cleaning up local files on RDS instance...');
    UTL_FILE.FREMOVE('DATA_PUMP_DIR', l_dumpfile_name);
    UTL_FILE.FREMOVE('DATA_PUMP_DIR', l_logfile_name);
    DBMS_OUTPUT.PUT_LINE('All backup files purged from DATA_PUMP_DIR.');
    DBMS_OUTPUT.PUT_LINE('Backup process completed successfully!');
EXCEPTION
    WHEN OTHERS THEN
        DBMS_OUTPUT.PUT_LINE('An error occurred: ' || SQLERRM);
        DBMS_OUTPUT.PUT_LINE('Please check the log file for details.');
        -- Stop an orphaned export so a rerun is not blocked by the job name
        -- METADATA_EXPORT_JOB still existing in dba_datapump_jobs.
        BEGIN
            DBMS_DATAPUMP.STOP_JOB(l_dp_handle, 1, 0, 0);
        EXCEPTION
            WHEN OTHERS THEN NULL;  -- handle may already be detached/invalid
        END;
        RAISE;
END;
/
No comments:
Post a Comment