-- Schema-level Data Pump export with parallelism, run as an anonymous block.
-- NOTE(review): the original text duplicated this block mid-stream (a second
-- "v_handle NUMBER; BEGIN ..." with no DECLARE, plus a repeated DETACH/END),
-- which is invalid PL/SQL; reconstructed here as a single well-formed block.
DECLARE
  v_handle NUMBER;  -- Data Pump job handle returned by DBMS_DATAPUMP.OPEN
BEGIN
  -- Create the export job; 'LATEST' targets the current database version.
  v_handle := DBMS_DATAPUMP.OPEN(
    operation => 'EXPORT',
    job_mode  => 'SCHEMA',
    job_name  => 'SCHEMA_EXPORT_JOB',
    version   => 'LATEST'
  );
  -- Dump file set; %U expands to a sequence number so each parallel worker
  -- can write its own file.
  DBMS_DATAPUMP.ADD_FILE(
    handle    => v_handle,
    filename  => 'schema_export%U.dmp',
    directory => 'DATA_PUMP_DIR',
    filetype  => DBMS_DATAPUMP.KU$_FILE_TYPE_DUMP_FILE
  );
  -- Log file for job progress and errors.
  DBMS_DATAPUMP.ADD_FILE(
    handle    => v_handle,
    filename  => 'schema_export.log',
    directory => 'DATA_PUMP_DIR',
    filetype  => DBMS_DATAPUMP.KU$_FILE_TYPE_LOG_FILE
  );
  -- Restrict the export to the schema(s) of interest.
  DBMS_DATAPUMP.METADATA_FILTER(
    handle => v_handle,
    name   => 'SCHEMA_EXPR',
    value  => q'[IN ('YOUR_SCHEMA')]'
  );
  -- Exclude certain system-owned procedural objects (common on RDS to avoid errors).
  DBMS_DATAPUMP.METADATA_FILTER(
    handle      => v_handle,
    name        => 'EXCLUDE_NAME_EXPR',
    value       => q'[IN (SELECT NAME FROM SYS.OBJ$ WHERE TYPE# IN (66,67,74,79,59,62,46) AND OWNER# IN (SELECT USER# FROM SYS.USER$ WHERE NAME IN ('RDSADMIN','SYS','SYSTEM')))]',
    object_path => 'PROCOBJ'
  );
  -- Enable parallelism (adjust degree based on the instance's CPU capacity).
  DBMS_DATAPUMP.SET_PARALLEL(
    handle => v_handle,
    degree => 4
  );
  DBMS_DATAPUMP.START_JOB(v_handle);
  DBMS_DATAPUMP.DETACH(v_handle); -- Detach so the job keeps running in the background
EXCEPTION
  WHEN OTHERS THEN
    -- Stop the failed job so it does not linger and block reuse of the
    -- job name, then re-raise the original error.
    IF v_handle IS NOT NULL THEN
      DBMS_DATAPUMP.STOP_JOB(v_handle, 1, 0, 0);
    END IF;
    RAISE;
END;
/
Schema-Level Export with Parallelism
-- Schema-level Data Pump import with parallelism.
DECLARE
  v_handle NUMBER;  -- Data Pump job handle
BEGIN
  -- Create a schema-mode import job against the current database version.
  v_handle := DBMS_DATAPUMP.OPEN(
    operation => 'IMPORT',
    job_mode  => 'SCHEMA',
    job_name  => 'SCHEMA_IMPORT_JOB',
    version   => 'LATEST'
  );
  -- Dump file set produced by the export; %U matches the numbered pieces.
  DBMS_DATAPUMP.ADD_FILE(
    handle    => v_handle,
    filename  => 'schema_export%U.dmp', -- Matches exported files
    directory => 'DATA_PUMP_DIR',
    filetype  => DBMS_DATAPUMP.KU$_FILE_TYPE_DUMP_FILE
  );
  -- Import log file.
  DBMS_DATAPUMP.ADD_FILE(
    handle    => v_handle,
    filename  => 'schema_import.log',
    directory => 'DATA_PUMP_DIR',
    filetype  => DBMS_DATAPUMP.KU$_FILE_TYPE_LOG_FILE
  );
  -- Optional: Remap schema if needed
  -- DBMS_DATAPUMP.METADATA_REMAP(v_handle, 'REMAP_SCHEMA', 'OLD_SCHEMA', 'NEW_SCHEMA');
  -- Enable parallelism (degree should not exceed the number of dump files).
  DBMS_DATAPUMP.SET_PARALLEL(
    handle => v_handle,
    degree => 4
  );
  DBMS_DATAPUMP.START_JOB(v_handle);
  DBMS_DATAPUMP.DETACH(v_handle); -- Job continues in the background
EXCEPTION
  WHEN OTHERS THEN
    -- Stop the failed job so its name can be reused, then re-raise.
    IF v_handle IS NOT NULL THEN
      DBMS_DATAPUMP.STOP_JOB(v_handle, 1, 0, 0);
    END IF;
    RAISE;
END;
/
Schema-Level Import with Parallelism
-- Import a previously exported schema dump set, running the job in the
-- background with four parallel workers.
DECLARE
  l_dp_handle NUMBER; -- handle for the Data Pump master process
BEGIN
  l_dp_handle := DBMS_DATAPUMP.OPEN(operation => 'IMPORT',
                                    job_mode  => 'SCHEMA',
                                    job_name  => 'SCHEMA_IMPORT_JOB',
                                    version   => 'LATEST');

  -- Source dump pieces written by the export job.
  DBMS_DATAPUMP.ADD_FILE(handle    => l_dp_handle,
                         filename  => 'schema_export%U.dmp',
                         directory => 'DATA_PUMP_DIR',
                         filetype  => DBMS_DATAPUMP.KU$_FILE_TYPE_DUMP_FILE);

  -- Where the import writes its progress log.
  DBMS_DATAPUMP.ADD_FILE(handle    => l_dp_handle,
                         filename  => 'schema_import.log',
                         directory => 'DATA_PUMP_DIR',
                         filetype  => DBMS_DATAPUMP.KU$_FILE_TYPE_LOG_FILE);

  -- Optional: Remap schema if needed
  -- DBMS_DATAPUMP.METADATA_REMAP(l_dp_handle, 'REMAP_SCHEMA', 'OLD_SCHEMA', 'NEW_SCHEMA');

  -- Four worker processes.
  DBMS_DATAPUMP.SET_PARALLEL(handle => l_dp_handle, degree => 4);

  DBMS_DATAPUMP.START_JOB(l_dp_handle);
  DBMS_DATAPUMP.DETACH(l_dp_handle);
END;
/
Table-Level Export with Parallelism
-- Table-level Data Pump export with parallelism.
DECLARE
  v_handle NUMBER;  -- Data Pump job handle
BEGIN
  -- Create a table-mode export job.
  v_handle := DBMS_DATAPUMP.OPEN(
    operation => 'EXPORT',
    job_mode  => 'TABLE',
    job_name  => 'TABLE_EXPORT_JOB',
    version   => 'LATEST'
  );
  -- Dump file set; %U allows one file per parallel worker.
  DBMS_DATAPUMP.ADD_FILE(
    handle    => v_handle,
    filename  => 'table_export%U.dmp',
    directory => 'DATA_PUMP_DIR',
    filetype  => DBMS_DATAPUMP.KU$_FILE_TYPE_DUMP_FILE
  );
  -- Export log file.
  DBMS_DATAPUMP.ADD_FILE(
    handle    => v_handle,
    filename  => 'table_export.log',
    directory => 'DATA_PUMP_DIR',
    filetype  => DBMS_DATAPUMP.KU$_FILE_TYPE_LOG_FILE
  );
  -- Owning schema of the table(s) to export.
  DBMS_DATAPUMP.METADATA_FILTER(
    handle => v_handle,
    name   => 'SCHEMA_EXPR',
    value  => q'[= 'YOUR_SCHEMA']'
  );
  -- Restrict the job to the named table(s).
  DBMS_DATAPUMP.METADATA_FILTER(
    handle => v_handle,
    name   => 'NAME_EXPR',
    value  => q'[IN ('YOUR_TABLE')]'
  );
  -- Enable parallelism.
  DBMS_DATAPUMP.SET_PARALLEL(
    handle => v_handle,
    degree => 4
  );
  DBMS_DATAPUMP.START_JOB(v_handle);
  DBMS_DATAPUMP.DETACH(v_handle); -- Job continues in the background
EXCEPTION
  WHEN OTHERS THEN
    -- Stop the failed job so its name can be reused, then re-raise.
    IF v_handle IS NOT NULL THEN
      DBMS_DATAPUMP.STOP_JOB(v_handle, 1, 0, 0);
    END IF;
    RAISE;
END;
/
Table-Level Import with Parallelism
-- Table-level Data Pump import with parallelism: reads the dump set written
-- by the matching table export and loads it into the same schema/table.
DECLARE
v_handle NUMBER;
BEGIN
-- Create a table-mode import job.
v_handle := DBMS_DATAPUMP.OPEN(
operation => 'IMPORT',
job_mode => 'TABLE',
job_name => 'TABLE_IMPORT_JOB',
version => 'LATEST'
);
-- Dump file set from the export; %U matches the numbered pieces.
DBMS_DATAPUMP.ADD_FILE(
handle => v_handle,
filename => 'table_export%U.dmp',
directory => 'DATA_PUMP_DIR',
filetype => DBMS_DATAPUMP.KU$_FILE_TYPE_DUMP_FILE
);
-- Import log file.
DBMS_DATAPUMP.ADD_FILE(
handle => v_handle,
filename => 'table_import.log',
directory => 'DATA_PUMP_DIR',
filetype => DBMS_DATAPUMP.KU$_FILE_TYPE_LOG_FILE
);
-- Owning schema of the table being imported.
DBMS_DATAPUMP.METADATA_FILTER(
handle => v_handle,
name => 'SCHEMA_EXPR',
value => q'[= 'YOUR_SCHEMA']'
);
-- Restrict the job to the named table(s).
DBMS_DATAPUMP.METADATA_FILTER(
handle => v_handle,
name => 'NAME_EXPR',
value => q'[IN ('YOUR_TABLE')]'
);
-- Enable parallelism
DBMS_DATAPUMP.SET_PARALLEL(
handle => v_handle,
degree => 4
);
DBMS_DATAPUMP.START_JOB(v_handle);
-- Detach so the job keeps running in the background.
DBMS_DATAPUMP.DETACH(v_handle);
END;
/
Table-Level Export Including Specific Partitions with Parallelism
-- Table-level Data Pump export restricted to specific partitions, with parallelism.
DECLARE
  v_handle NUMBER;  -- Data Pump job handle
BEGIN
  -- Create a table-mode export job (partitions are selected below).
  v_handle := DBMS_DATAPUMP.OPEN(
    operation => 'EXPORT',
    job_mode  => 'TABLE',
    job_name  => 'PARTITION_EXPORT_JOB',
    version   => 'LATEST'
  );
  -- Dump file set; %U allows one file per parallel worker.
  DBMS_DATAPUMP.ADD_FILE(
    handle    => v_handle,
    filename  => 'partition_export%U.dmp',
    directory => 'DATA_PUMP_DIR',
    filetype  => DBMS_DATAPUMP.KU$_FILE_TYPE_DUMP_FILE
  );
  -- Export log file.
  DBMS_DATAPUMP.ADD_FILE(
    handle    => v_handle,
    filename  => 'partition_export.log',
    directory => 'DATA_PUMP_DIR',
    filetype  => DBMS_DATAPUMP.KU$_FILE_TYPE_LOG_FILE
  );
  -- Owning schema of the partitioned table.
  DBMS_DATAPUMP.METADATA_FILTER(
    handle => v_handle,
    name   => 'SCHEMA_EXPR',
    value  => q'[= 'YOUR_SCHEMA']'
  );
  -- The partitioned table itself.
  DBMS_DATAPUMP.METADATA_FILTER(
    handle => v_handle,
    name   => 'NAME_EXPR',
    value  => q'[= 'YOUR_PARTITIONED_TABLE']'
  );
  -- Row-level filter: only these partitions are exported.
  DBMS_DATAPUMP.DATA_FILTER(
    handle      => v_handle,
    name        => 'PARTITION_EXPR',
    value       => q'[IN ('PARTITION_Q1', 'PARTITION_Q2')]',
    table_name  => 'YOUR_PARTITIONED_TABLE',
    schema_name => 'YOUR_SCHEMA'
  );
  -- Enable parallelism (effective for large partitions).
  DBMS_DATAPUMP.SET_PARALLEL(
    handle => v_handle,
    degree => 4
  );
  DBMS_DATAPUMP.START_JOB(v_handle);
  DBMS_DATAPUMP.DETACH(v_handle); -- Job continues in the background
EXCEPTION
  WHEN OTHERS THEN
    -- Stop the failed job so its name can be reused, then re-raise.
    IF v_handle IS NOT NULL THEN
      DBMS_DATAPUMP.STOP_JOB(v_handle, 1, 0, 0);
    END IF;
    RAISE;
END;
/
### Updated Examples with Parallelism
To enable parallelism in Data Pump jobs (for both exports and imports), add `DBMS_DATAPUMP.SET_PARALLEL(v_handle, degree => N);` after opening the handle and before starting the job. The `degree` specifies the number of parallel processes (e.g., 4 or 8, based on your RDS instance's vCPU count—check via `SELECT * FROM v$parameter WHERE name = 'cpu_count';`). RDS supports parallelism up to the instance's limits (e.g., a db.m5.large supports a lower degree than a db.m5.24xlarge). Higher degrees speed up large jobs but increase resource usage; start low and test. Parallelism works for schema, table, and full modes, and with partitions. It requires multiple dump files (use `%U` in the filename for auto-generation).
Note: Parallelism is effective if the dump files are on fast storage (RDS uses EBS), and for imports, ensure the target has sufficient resources. Monitor with `DBMS_DATAPUMP.GET_STATUS` or `V$SESSION_LONGOPS`.
#### Schema-Level Export with Parallelism
```sql
-- Schema-level Data Pump export with parallelism (anonymous block).
DECLARE
v_handle NUMBER;
BEGIN
-- Create the export job; 'LATEST' targets the current database version.
v_handle := DBMS_DATAPUMP.OPEN(
operation => 'EXPORT',
job_mode => 'SCHEMA',
job_name => 'SCHEMA_EXPORT_JOB',
version => 'LATEST'
);
-- Dump file set; %U expands to a sequence number per parallel worker.
DBMS_DATAPUMP.ADD_FILE(
handle => v_handle,
filename => 'schema_export%U.dmp', -- %U for parallel files
directory => 'DATA_PUMP_DIR',
filetype => DBMS_DATAPUMP.KU$_FILE_TYPE_DUMP_FILE
);
-- Log file for job progress and errors.
DBMS_DATAPUMP.ADD_FILE(
handle => v_handle,
filename => 'schema_export.log',
directory => 'DATA_PUMP_DIR',
filetype => DBMS_DATAPUMP.KU$_FILE_TYPE_LOG_FILE
);
-- Restrict the export to the schema(s) of interest.
DBMS_DATAPUMP.METADATA_FILTER(
handle => v_handle,
name => 'SCHEMA_EXPR',
value => q'[IN ('YOUR_SCHEMA')]'
);
-- Exclude system-owned procedural objects (common on RDS to avoid errors).
DBMS_DATAPUMP.METADATA_FILTER(
handle => v_handle,
name => 'EXCLUDE_NAME_EXPR',
value => q'[IN (SELECT NAME FROM SYS.OBJ$ WHERE TYPE# IN (66,67,74,79,59,62,46) AND OWNER# IN (SELECT USER# FROM SYS.USER$ WHERE NAME IN ('RDSADMIN','SYS','SYSTEM')))]',
object_path => 'PROCOBJ'
);
-- Enable parallelism (adjust degree based on instance)
DBMS_DATAPUMP.SET_PARALLEL(
handle => v_handle,
degree => 4
);
DBMS_DATAPUMP.START_JOB(v_handle);
-- Detach so the job keeps running in the background.
DBMS_DATAPUMP.DETACH(v_handle);
END;
/
```
#### Schema-Level Import with Parallelism
```sql
-- Schema-level Data Pump import, parallelized over four workers.
DECLARE
  c_degree CONSTANT NUMBER := 4; -- parallel worker count
  v_job    NUMBER;               -- Data Pump job handle
BEGIN
  v_job := DBMS_DATAPUMP.OPEN(
             operation => 'IMPORT',
             job_mode  => 'SCHEMA',
             job_name  => 'SCHEMA_IMPORT_JOB',
             version   => 'LATEST'
           );

  -- Attach the dump file set written by the export.
  DBMS_DATAPUMP.ADD_FILE(
    handle    => v_job,
    filename  => 'schema_export%U.dmp',
    directory => 'DATA_PUMP_DIR',
    filetype  => DBMS_DATAPUMP.KU$_FILE_TYPE_DUMP_FILE
  );

  -- Attach the import log.
  DBMS_DATAPUMP.ADD_FILE(
    handle    => v_job,
    filename  => 'schema_import.log',
    directory => 'DATA_PUMP_DIR',
    filetype  => DBMS_DATAPUMP.KU$_FILE_TYPE_LOG_FILE
  );

  -- Optional: Remap schema if needed
  -- DBMS_DATAPUMP.METADATA_REMAP(v_job, 'REMAP_SCHEMA', 'OLD_SCHEMA', 'NEW_SCHEMA');

  DBMS_DATAPUMP.SET_PARALLEL(handle => v_job, degree => c_degree);

  DBMS_DATAPUMP.START_JOB(v_job);
  DBMS_DATAPUMP.DETACH(v_job);
END;
/
```
#### Table-Level Export with Parallelism
```sql
-- Table-level Data Pump export with parallelism (anonymous block).
DECLARE
v_handle NUMBER;
BEGIN
-- Create a table-mode export job.
v_handle := DBMS_DATAPUMP.OPEN(
operation => 'EXPORT',
job_mode => 'TABLE',
job_name => 'TABLE_EXPORT_JOB',
version => 'LATEST'
);
-- Dump file set; %U allows one file per parallel worker.
DBMS_DATAPUMP.ADD_FILE(
handle => v_handle,
filename => 'table_export%U.dmp',
directory => 'DATA_PUMP_DIR',
filetype => DBMS_DATAPUMP.KU$_FILE_TYPE_DUMP_FILE
);
-- Export log file.
DBMS_DATAPUMP.ADD_FILE(
handle => v_handle,
filename => 'table_export.log',
directory => 'DATA_PUMP_DIR',
filetype => DBMS_DATAPUMP.KU$_FILE_TYPE_LOG_FILE
);
-- Owning schema of the table(s) to export.
DBMS_DATAPUMP.METADATA_FILTER(
handle => v_handle,
name => 'SCHEMA_EXPR',
value => q'[= 'YOUR_SCHEMA']'
);
-- Restrict the job to the named table(s).
DBMS_DATAPUMP.METADATA_FILTER(
handle => v_handle,
name => 'NAME_EXPR',
value => q'[IN ('YOUR_TABLE')]'
);
-- Enable parallelism
DBMS_DATAPUMP.SET_PARALLEL(
handle => v_handle,
degree => 4
);
DBMS_DATAPUMP.START_JOB(v_handle);
-- Detach so the job keeps running in the background.
DBMS_DATAPUMP.DETACH(v_handle);
END;
/
```
#### Table-Level Import with Parallelism
```sql
-- Import the dump set produced by the table export, using four parallel workers.
DECLARE
  l_handle NUMBER; -- Data Pump job handle
BEGIN
  l_handle := DBMS_DATAPUMP.OPEN(operation => 'IMPORT',
                                 job_mode  => 'TABLE',
                                 job_name  => 'TABLE_IMPORT_JOB',
                                 version   => 'LATEST');

  -- Dump pieces written by the export job.
  DBMS_DATAPUMP.ADD_FILE(handle    => l_handle,
                         filename  => 'table_export%U.dmp',
                         directory => 'DATA_PUMP_DIR',
                         filetype  => DBMS_DATAPUMP.KU$_FILE_TYPE_DUMP_FILE);

  -- Progress/error log for the import.
  DBMS_DATAPUMP.ADD_FILE(handle    => l_handle,
                         filename  => 'table_import.log',
                         directory => 'DATA_PUMP_DIR',
                         filetype  => DBMS_DATAPUMP.KU$_FILE_TYPE_LOG_FILE);

  -- Select the owning schema, then the table(s) within it.
  DBMS_DATAPUMP.METADATA_FILTER(handle => l_handle,
                                name   => 'SCHEMA_EXPR',
                                value  => q'[= 'YOUR_SCHEMA']');
  DBMS_DATAPUMP.METADATA_FILTER(handle => l_handle,
                                name   => 'NAME_EXPR',
                                value  => q'[IN ('YOUR_TABLE')]');

  -- Four worker processes.
  DBMS_DATAPUMP.SET_PARALLEL(handle => l_handle, degree => 4);

  DBMS_DATAPUMP.START_JOB(l_handle);
  DBMS_DATAPUMP.DETACH(l_handle);
END;
/
```
#### Table-Level Export Including Specific Partitions with Parallelism
```sql
-- Table-level Data Pump export restricted to specific partitions, with parallelism.
DECLARE
v_handle NUMBER;
BEGIN
-- Create a table-mode export job (partitions are selected below).
v_handle := DBMS_DATAPUMP.OPEN(
operation => 'EXPORT',
job_mode => 'TABLE',
job_name => 'PARTITION_EXPORT_JOB',
version => 'LATEST'
);
-- Dump file set; %U allows one file per parallel worker.
DBMS_DATAPUMP.ADD_FILE(
handle => v_handle,
filename => 'partition_export%U.dmp',
directory => 'DATA_PUMP_DIR',
filetype => DBMS_DATAPUMP.KU$_FILE_TYPE_DUMP_FILE
);
-- Export log file.
DBMS_DATAPUMP.ADD_FILE(
handle => v_handle,
filename => 'partition_export.log',
directory => 'DATA_PUMP_DIR',
filetype => DBMS_DATAPUMP.KU$_FILE_TYPE_LOG_FILE
);
-- Owning schema of the partitioned table.
DBMS_DATAPUMP.METADATA_FILTER(
handle => v_handle,
name => 'SCHEMA_EXPR',
value => q'[= 'YOUR_SCHEMA']'
);
-- The partitioned table itself.
DBMS_DATAPUMP.METADATA_FILTER(
handle => v_handle,
name => 'NAME_EXPR',
value => q'[= 'YOUR_PARTITIONED_TABLE']'
);
-- Row-level filter: only these partitions are exported.
DBMS_DATAPUMP.DATA_FILTER(
handle => v_handle,
name => 'PARTITION_EXPR',
value => q'[IN ('PARTITION_Q1', 'PARTITION_Q2')]',
table_name => 'YOUR_PARTITIONED_TABLE',
schema_name => 'YOUR_SCHEMA'
);
-- Enable parallelism (effective for large partitions)
DBMS_DATAPUMP.SET_PARALLEL(
handle => v_handle,
degree => 4
);
DBMS_DATAPUMP.START_JOB(v_handle);
-- Detach so the job keeps running in the background.
DBMS_DATAPUMP.DETACH(v_handle);
END;
/
```
#### Table-Level Import Including Specific Partitions with Parallelism
```sql
-- Table-level Data Pump import restricted to specific partitions, with parallelism.
DECLARE
v_handle NUMBER;
BEGIN
-- Create a table-mode import job (partitions are selected below).
v_handle := DBMS_DATAPUMP.OPEN(
operation => 'IMPORT',
job_mode => 'TABLE',
job_name => 'PARTITION_IMPORT_JOB',
version => 'LATEST'
);
-- Dump file set written by the partition export; %U matches the pieces.
DBMS_DATAPUMP.ADD_FILE(
handle => v_handle,
filename => 'partition_export%U.dmp',
directory => 'DATA_PUMP_DIR',
filetype => DBMS_DATAPUMP.KU$_FILE_TYPE_DUMP_FILE
);
-- Import log file.
DBMS_DATAPUMP.ADD_FILE(
handle => v_handle,
filename => 'partition_import.log',
directory => 'DATA_PUMP_DIR',
filetype => DBMS_DATAPUMP.KU$_FILE_TYPE_LOG_FILE
);
-- Owning schema of the partitioned table.
DBMS_DATAPUMP.METADATA_FILTER(
handle => v_handle,
name => 'SCHEMA_EXPR',
value => q'[= 'YOUR_SCHEMA']'
);
-- The partitioned table itself.
DBMS_DATAPUMP.METADATA_FILTER(
handle => v_handle,
name => 'NAME_EXPR',
value => q'[= 'YOUR_PARTITIONED_TABLE']'
);
-- Row-level filter: only these partitions are imported.
DBMS_DATAPUMP.DATA_FILTER(
handle => v_handle,
name => 'PARTITION_EXPR',
value => q'[IN ('PARTITION_Q1', 'PARTITION_Q2')]',
table_name => 'YOUR_PARTITIONED_TABLE',
schema_name => 'YOUR_SCHEMA'
);
-- Enable parallelism
DBMS_DATAPUMP.SET_PARALLEL(
handle => v_handle,
degree => 4
);
DBMS_DATAPUMP.START_JOB(v_handle);
-- Detach so the job keeps running in the background.
DBMS_DATAPUMP.DETACH(v_handle);
END;
/
```
Table-Level Import Including Specific Partitions with Parallelism
-- Import selected partitions from the partition export dump set, with four
-- parallel workers; the job is detached to run in the background.
DECLARE
  l_job NUMBER; -- Data Pump job handle
BEGIN
  l_job := DBMS_DATAPUMP.OPEN(operation => 'IMPORT',
                              job_mode  => 'TABLE',
                              job_name  => 'PARTITION_IMPORT_JOB',
                              version   => 'LATEST');

  -- Dump pieces produced by the partition export.
  DBMS_DATAPUMP.ADD_FILE(handle    => l_job,
                         filename  => 'partition_export%U.dmp',
                         directory => 'DATA_PUMP_DIR',
                         filetype  => DBMS_DATAPUMP.KU$_FILE_TYPE_DUMP_FILE);

  -- Progress/error log for the import.
  DBMS_DATAPUMP.ADD_FILE(handle    => l_job,
                         filename  => 'partition_import.log',
                         directory => 'DATA_PUMP_DIR',
                         filetype  => DBMS_DATAPUMP.KU$_FILE_TYPE_LOG_FILE);

  -- Narrow the job: schema, then table, then the individual partitions.
  DBMS_DATAPUMP.METADATA_FILTER(handle => l_job,
                                name   => 'SCHEMA_EXPR',
                                value  => q'[= 'YOUR_SCHEMA']');
  DBMS_DATAPUMP.METADATA_FILTER(handle => l_job,
                                name   => 'NAME_EXPR',
                                value  => q'[= 'YOUR_PARTITIONED_TABLE']');
  DBMS_DATAPUMP.DATA_FILTER(handle      => l_job,
                            name        => 'PARTITION_EXPR',
                            value       => q'[IN ('PARTITION_Q1', 'PARTITION_Q2')]',
                            table_name  => 'YOUR_PARTITIONED_TABLE',
                            schema_name => 'YOUR_SCHEMA');

  -- Four worker processes.
  DBMS_DATAPUMP.SET_PARALLEL(handle => l_job, degree => 4);

  DBMS_DATAPUMP.START_JOB(l_job);
  DBMS_DATAPUMP.DETACH(l_job);
END;
/
No comments:
Post a Comment