Oracle DBMS_DATAPUMP
Version 21c

General Information
Library Note Morgan's Library Page Header
ACE Director Alum Daniel Morgan, founder of Morgan's Library, is scheduling complimentary technical Workshops on Database Security for the first 30 Oracle Database customers located anywhere in North America, EMEA, LATAM, or APAC that send an email to asra_us@oracle.com. Request a Workshop for your organization today.
Purpose PL/SQL API to DataPump Import, Export, and Transportable Tablespaces
AUTHID CURRENT_USER
Constants
Name Data Type Value
Bitmap Definitions For TTS_CLOSURE_CHECK Parameter
KU$_TTS_CLOSURE_CHECK_ON BINARY_INTEGER 1
KU$_TTS_CLOSURE_CHECK_OFF BINARY_INTEGER 2
KU$_TTS_CLOSURE_CHECK_FULL BINARY_INTEGER 4
KU$_TTS_CLOSURE_CHECK_TEST BINARY_INTEGER 8
Bitmap Definitions For Transportable Parameter
KU$_TTS_NEVER BINARY_INTEGER 1
KU$_TTS_ALWAYS BINARY_INTEGER 2
KU$_TTS_KEEP_READ_ONLY BINARY_INTEGER 4
KU$_TTS_NO_BITMAP_REBLD BINARY_INTEGER 8
Dumpfile Types
KU$_DUMPFILE_TYPE_DISK BINARY_INTEGER 0
KU$_DUMPFILE_TYPE_PIPE BINARY_INTEGER 1
KU$_DUMPFILE_TYPE_TAPE BINARY_INTEGER 2
KU$_DUMPFILE_TYPE_TEMPLATE BINARY_INTEGER 3
KU$_DUMPFILE_TYPE_URIDISK BINARY_INTEGER 4
 
KU$_FILE_TYPE_DUMP_FILE BINARY_INTEGER 1
KU$_FILE_TYPE_BAD_FILE BINARY_INTEGER 2
KU$_FILE_TYPE_LOG_FILE BINARY_INTEGER 3
KU$_FILE_TYPE_SQL_FILE BINARY_INTEGER 4
KU$_FILE_TYPE_URIDUMP_FILE BINARY_INTEGER 5
 
KU$_JOB_COMPLETE NUMBER 1
KU$_JOB_COMPLETE_ERRORS NUMBER 2
KU$_JOB_STOPPED NUMBER 3
KU$_JOB_ABORTED NUMBER 4
 
KU$_JOB_VIEW_ALL NUMBER 0
KU$_JOB_TTS_TABLESPACES NUMBER 1
KU$_JOB_VIEW_ENCCOL_TABLES NUMBER 2
Mask Bit Definitions
KU$_STATUS_WIP BINARY_INTEGER 1
KU$_STATUS_JOB_DESC BINARY_INTEGER 2
KU$_STATUS_JOB_STATUS BINARY_INTEGER 4
KU$_STATUS_JOB_ERROR BINARY_INTEGER 8
 
KU$_STATUS_VERSION_1 NUMBER 1
KU$_STATUS_VERSION_2 NUMBER 2
KU$_STATUS_VERSION_3 NUMBER 3
KU$_STATUS_VERSION_4 NUMBER 4
KU$_STATUS_VERSION_5 NUMBER 5
KU$_STATUS_VERSION NUMBER KU$_STATUS_VERSION_5
 
KU$_DATAOPT_SKIP_CONST_ERR NUMBER 1
KU$_DATAOPT_XMLTYPE_CLOB NUMBER 2
KU$_DATAOPT_NOTYPE_EVOL NUMBER 4
KU$_DATAOPT_DISABL_APPEND_HINT NUMBER 8
KU$_DATAOPT_REJECT_ROWS_REPCHR NUMBER 16
KU$_DATAOPT_ENABLE_NET_COMP NUMBER 32
KU$_DATAOPT_GRP_PART_TAB NUMBER 64
KU$_DATAOPT_TRUST_EXIST_TB_PAR NUMBER 128
KU$_DATAOPT_VALIDATE_TBL_DATA NUMBER 256
KU$_DATAOPT_VERIFY_STREAM_FORM NUMBER 512
KU$_DATAOPT_CONT_LD_ON_FMT_ERR NUMBER 1024
 
KU$_ICRPP_PREIMPORT NUMBER 0
KU$_ICRPP_FINAL NUMBER 1
KU$_ICRPP_EARLY NUMBER 2
 
KU$_ICRFLAGS_IS_EXPR NUMBER 1
KU$_ICRFLAGS_EARLY_IMPORT NUMBER 2
KU$_ICRFLAGS_GET_DEPENDENTS NUMBER 4
KU$_ICRFLAGS_EXCLUDE NUMBER 8
KU$_ICRFLAGS_XDB_NO_TTS NUMBER 16
 
KU$_COMPRESS_NONE NUMBER 1
KU$_COMPRESS_METADATA NUMBER 2
Item Codes for entry in a dump file info table (of type ku$_dumpfile_info)
KU$_DFHDR_FILE_VERSION NUMBER 1
KU$_DFHDR_MASTER_PRESENT NUMBER 2
KU$_DFHDR_GUID NUMBER 3
KU$_DFHDR_FILE_NUMBER NUMBER 4
KU$_DFHDR_CHARSET_ID NUMBER 5
KU$_DFHDR_CREATION_DATE NUMBER 6
KU$_DFHDR_FLAGS NUMBER 7
KU$_DFHDR_JOB_NAME NUMBER 8
KU$_DFHDR_PLATFORM NUMBER 9
KU$_DFHDR_INSTANCE NUMBER 10
KU$_DFHDR_LANGUAGE NUMBER 11
KU$_DFHDR_BLOCKSIZE NUMBER 12
KU$_DFHDR_DIRPATH NUMBER 13
KU$_DFHDR_METADATA_COMPRESSED NUMBER 14
KU$_DFHDR_DB_VERSION NUMBER 15
KU$_DFHDR_MASTER_PIECE_COUNT NUMBER 16
KU$_DFHDR_MASTER_PIECE_NUMBER NUMBER 17
KU$_DFHDR_DATA_COMPRESSED NUMBER 18
KU$_DFHDR_METADATA_ENCRYPTED NUMBER 19
KU$_DFHDR_DATA_ENCRYPTED NUMBER 20
KU$_DFHDR_COLUMNS_ENCRYPTED NUMBER 21
Miscellaneous
KU$_DFHDR_HADOOP_TRAILERS NUMBER 24
KU$_DFHDR_TTSTM_DUMP NUMBER 25
KU$_DFHDR_MAX_ITEM_CODE NUMBER 25
Obsolete Modes: Deprecated
KU$_DFHDR_ENCPWD_MODE NUMBER 22
KU$_DFHDR_ENCPWD_MODE_UNKNOWN NUMBER 1
KU$_DFHDR_ENCPWD_MODE_NONE NUMBER 2
KU$_DFHDR_ENCPWD_MODE_PASSWORD NUMBER 3
KU$_DFHDR_ENCPWD_MODE_DUAL NUMBER 4
KU$_DFHDR_ENCPWD_MODE_TRANS NUMBER 5
Modes Returnable for item code KU$_DFHDR_ENCRYPTION_MODE
KU$_DFHDR_ENCRYPTION_MODE NUMBER 22
KU$_DFHDR_ENCMODE_UNKNOWN NUMBER 1
KU$_DFHDR_ENCMODE_NONE NUMBER 2
KU$_DFHDR_ENCMODE_PASSWORD NUMBER 3
KU$_DFHDR_ENCMODE_DUAL NUMBER 4
KU$_DFHDR_ENCMODE_TRANS NUMBER 5
Modes Returnable for item code KU$_DFHDR_COMPRESSION_ALG
KU$_DFHDR_COMPRESSION_ALG NUMBER 23
KU$_DFHDR_CMPALG_UNKNOWN NUMBER 1
KU$_DFHDR_CMPALG_NONE NUMBER 2
KU$_DFHDR_CMPALG_BASIC NUMBER 3
KU$_DFHDR_CMPALG_LOW NUMBER 4
KU$_DFHDR_CMPALG_MEDIUM NUMBER 5
KU$_DFHDR_CMPALG_HIGH NUMBER 6
Remap Flag
KU$_DATA_REMAP_WITH_ROWID NUMBER 1
-- Job Description Data Types
-- Single job parameter as reported in a job description.
CREATE TYPE sys.ku$_ParamValue1010 AS OBJECT (
param_name    VARCHAR2(30),    -- Parameter name
param_op      VARCHAR2(30),    -- Operation the parameter applies to
param_type    VARCHAR2(30),    -- Parameter datatype
param_length  NUMBER,          -- Length of the value
param_value_n NUMBER,          -- Numeric value (when numeric)
param_value_t VARCHAR2(2000)); -- Text value (when text)
/

-- Collection of job parameters; used by the ku$_JobDesc* types below.
CREATE TYPE sys.ku$_ParamValues1010 AS TABLE OF sys.ku$_ParamValue1010;
/

-- Original (10.1) job description type; superseded by ku$_JobDesc1020/1210.
CREATE TYPE sys.ku$_JobDesc1010 AS OBJECT (
job_name      VARCHAR2(30),    -- The job name
guid          RAW(16),         -- The job GUID
operation     VARCHAR2(30),    -- Current operation
job_mode      VARCHAR2(30),    -- Current mode
remote_link   VARCHAR2(4000),  -- DB link, if any
owner         VARCHAR2(30),    -- Job owner
instance      VARCHAR2(16),    -- The instance name
db_version    VARCHAR2(30),    -- Version of objects
creator_privs VARCHAR2(30),    -- Privs of job
start_time    DATE,            -- This job start time
max_degree    NUMBER,          -- Max. parallelism
log_file      VARCHAR2(4000),  -- Log file name
sql_file      VARCHAR2(4000),  -- SQL file name
params ku$_ParamValues1010);   -- Parameter list
/

CREATE TYPE sys.ku$_JobDesc1020 IS OBJECT (
job_name        VARCHAR2(30),    -- The job name
guid            RAW(16),         -- The job GUID
operation       VARCHAR2(30),    -- Current operation
job_mode        VARCHAR2(30),    -- Current mode
remote_link     VARCHAR2(4000),  -- DB link, if any
owner           VARCHAR2(30),    -- Job owner
platform        VARCHAR2(101),   -- Current job platform
exp_platform    VARCHAR2(101),   -- Export platform
global_name     VARCHAR2(4000),  -- Current global name
exp_global_name VARCHAR2(4000),  -- Export global name
instance        VARCHAR2(16),    -- The instance name
db_version      VARCHAR2(30),    -- Version of objects
exp_db_version  VARCHAR2(30),    -- Export version
scn             NUMBER,          -- Job SCN
creator_privs   VARCHAR2(30),    -- Privs of job
start_time      DATE,            -- This job start time
exp_start_time  DATE,            -- Export start time
term_reason     NUMBER,          -- Job termination code
max_degree      NUMBER,          -- Max. parallelism
log_file        VARCHAR2(4000),  -- Log file name
sql_file        VARCHAR2(4000),  -- SQL file name
params          ku$_ParamValues1010)  -- Parameter list
/

-- 12.1 job description type; adds version, timezone, endianness and charset
-- information relative to ku$_JobDesc1020.
CREATE TYPE sys.ku$_JobDesc1210 IS OBJECT (
job_name         VARCHAR2(30),        -- The job name
guid             RAW(16),             -- The job GUID
operation        VARCHAR2(30),        -- Current operation
job_mode         VARCHAR2(30),        -- Current mode
remote_link      VARCHAR2(4000),      -- DB link, if any
owner            VARCHAR2(30),        -- Job owner
platform         VARCHAR2(101),       -- Current job platform
exp_platform     VARCHAR2(101),       -- Export platform
global_name      VARCHAR2(4000),      -- Current global name
exp_global_name  VARCHAR2(4000),      -- Export global name
instance         VARCHAR2(16),        -- The instance name
db_version       VARCHAR2(30),        -- Cur. server software version
exp_db_version   VARCHAR2(30),        -- Export svr. software version
job_version      VARCHAR2(30),        -- Negotiated data version
scn              NUMBER,              -- Job SCN
creator_privs    VARCHAR2(30),        -- Privs of job
start_time       DATE,                -- This job start time
exp_start_time   DATE,                -- Export start time
term_reason      NUMBER,              -- Job termination code
max_degree       NUMBER,              -- Max. parallelism
timezone         VARCHAR2(64),        -- Cur. server timezone
exp_timezone     VARCHAR2(64),        -- Exp. server timezone
tstz_version     NUMBER,              -- Cur. server timezone version
exp_tstz_version NUMBER,              -- Exp. server timezone version
endianness       VARCHAR2(16),        -- Cur. platform's endianness
exp_endianness   VARCHAR2(16),        -- Exp. platform's endianness ('BIG' or 'LITTLE')
charset          VARCHAR2(28),        -- Cur. server charset
exp_charset      VARCHAR2(28),        -- Exp. server charset
ncharset         VARCHAR2(28),        -- Cur. server national charset
exp_ncharset     VARCHAR2(28),        -- Exp. server national charset
log_file         VARCHAR2(4000),      -- Log file name
sql_file         VARCHAR2(4000),      -- SQL file name
params           ku$_ParamValues1010) -- Parameter list
/
-- DumpFile Types
-- Describes one file in a job's dump file set.
CREATE TYPE sys.ku$_DumpFile1010 AS OBJECT (
file_name          VARCHAR2(4000),  -- File name
file_type          NUMBER,          -- One of the KU$_FILE_TYPE_* constants
file_size          NUMBER,          -- File size
file_bytes_written NUMBER);         -- Bytes written so far
/

CREATE TYPE sys.ku$_DumpFileSet1010 AS TABLE OF sys.ku$_DumpFile1010;
/

CREATE TYPE sys.ku$_dumpfile_item IS OBJECT (
item_code NUMBER,           -- Identifies header item
value     VARCHAR2(2048));  -- Text string value
/
Executable-Package Equivalencies

The examples at right were written by Richard Harrison who invested a substantial effort in putting them together and are republished here with his permission. The original post can be found here.
Executable Package
ACCESS_METHOD dbms_datapump.set_parameter(handle => h1, name => 'DATA_ACCESS_METHOD', value => 'AUTOMATIC');
dbms_datapump.set_parameter(handle => h1, name => 'DATA_ACCESS_METHOD', value => 'DIRECT_PATH');
dbms_datapump.set_parameter(handle => h1, name => 'DATA_ACCESS_METHOD', value => 'EXTERNAL_TABLE');
CLUSTER dbms_datapump.start_job(xxx,xxx,xxx,cluster_ok=>0,xxx,xx); = 'N'
dbms_datapump.start_job(xxx,xxx,xxx,cluster_ok=>1,xxx,xx); = 'Y'
COMPRESSION dbms_datapump.set_parameter(handle => l_dp_handle, name => 'COMPRESSION', value => 'ALL');
dbms_datapump.set_parameter(handle => l_dp_handle, name => 'COMPRESSION', value => 'DATA_ONLY');
dbms_datapump.set_parameter(handle => l_dp_handle, name => 'COMPRESSION', value => 'METADATA_ONLY');
CONTENT dbms_datapump.set_parameter(handle => h1, name => 'INCLUDE_METADATA', value => 1); ALL
dbms_datapump.set_parameter(handle => h1, name => 'INCLUDE_METADATA', value => 0); DATA_ONLY
dbms_datapump.data_filter(handle => h1, name => 'INCLUDE_ROWS', value => 0); METADATA_ONLY
DATA_OPTIONS dbms_datapump.set_parameter(handle => l_dp_handle, name => 'DATA_OPTIONS', value => dbms_datapump.KU$_DATAOPT_XMLTYPE_CLOB);
DIRECTORY See dumpfile/logfile
DUMPFILE dbms_datapump.add_file(handle => l_dp_handle, filename => 'test.dmp', directory => 'SCRATCH');
ENCRYPTION dbms_datapump.set_parameter(handle => h1, name => 'ENCRYPTION', value => 'ALL');
dbms_datapump.set_parameter(handle => h1, name => 'ENCRYPTION', value => 'DATA_ONLY');
dbms_datapump.set_parameter(handle => h1, name => 'ENCRYPTION', value => 'ENCRYPTED_COLUMNS_ONLY');
dbms_datapump.set_parameter(handle => h1, name => 'ENCRYPTION', value => 'METADATA_ONLY');
dbms_datapump.set_parameter(handle => h1, name => 'ENCRYPTION', value => 'NONE');
ENCRYPTION_ALGORITHM dbms_datapump.set_parameter(handle => h1, name => 'ENCRYPTION_ALGORITHM', value => 'AES128');
dbms_datapump.set_parameter(handle => h1, name => 'ENCRYPTION_ALGORITHM', value => 'AES192');
dbms_datapump.set_parameter(handle => h1, name => 'ENCRYPTION_ALGORITHM', value => 'AES256');
ENCRYPTION_MODE dbms_datapump.set_parameter(handle => h1, name => 'ENCRYPTION_MODE', value => 'DUAL');
dbms_datapump.set_parameter(handle => h1, name => 'ENCRYPTION_MODE', value => 'PASSWORD');
dbms_datapump.set_parameter(handle => h1, name => 'ENCRYPTION_MODE', value => 'TRANSPARENT');
ENCRYPTION_PASSWORD dbms_datapump.set_parameter(handle => h1, name => 'ENCRYPTION_PASSWORD', value => 'secret password');
ESTIMATE dbms_datapump.set_parameter(handle => h1, name => 'ESTIMATE', value => 'BLOCKS');
ESTIMATE_ONLY dbms_datapump.set_parameter(handle => h1, name => 'ESTIMATE_ONLY', value => 1);
EXCLUDE dbms_datapump.metadata_filter(l_dp_handle, 'SCHEMA_EXPR', '<> (''SCHEMANAME'')');
dbms_datapump.metadata_filter(l_dp_handle, 'EXCLUDE_PATH_EXPR', 'IN (''INDEX'', ''SYNONYMS'', ''GRANTS'', ''STATISTICS'')');
FILESIZE dbms_datapump.add_file(handle => h1, filename => 'test.dmp', directory => 'SCRATCH',filesize => '1024000', reusefile => 1);
FLASHBACK_SCN dbms_datapump.set_parameter(handle => h1, name => 'FLASHBACK_SCN', value => nnnnnnnn);
dbms_datapump.set_parameter(handle => h1, name => 'FLASHBACK_SCN', value => TIMESTAMP_TO_SCN( TO_TIMESTAMP( TO_CHAR( SYSDATE))));
FLASHBACK_TIME dbms_datapump.set_parameter(handle => h1, name => 'FLASHBACK_TIME', value => 'TO_TIMESTAMP( TO_CHAR(SYSDATE) )');
FULL h1 := dbms_datapump.open(operation => 'EXPORT', job_mode => 'FULL', remote_link => NULL, version => 'LATEST');
INCLUDE dbms_datapump.metadata_filter(l_dp_handle, 'SCHEMA_EXPR', 'IN (''SCHEMANAME'')');
JOB_NAME h1 := dbms_datapump.open(operation => 'EXPORT', job_mode => 'FULL', remote_link => NULL, version => 'LATEST',job_name =>'MY JOB NAME');
KEEP_MASTER dbms_datapump.set_parameter(handle => l_dp_handle, name => 'KEEP_MASTER', value => 1);
dbms_datapump.set_parameter(handle => l_dp_handle, name => 'KEEP_MASTER', value => 0);
LOGFILE dbms_datapump.add_file(handle => l_dp_handle, filename => 'test.log', directory => 'SCRATCH', filetype => dbms_datapump.KU$_FILE_TYPE_LOG_FILE, reusefile => 1);
METRICS dbms_datapump.set_parameter(HANDLE => HANDLR, NAME => 'METRICS', VALUE => 0);
dbms_datapump.set_parameter(HANDLE => HANDLR, NAME => 'METRICS', VALUE => 1);
NETWORK_LINK h1 := dbms_datapump.open(operation => 'EXPORT', job_mode => 'FULL', remote_link => 'DB_LINK_NAME', version => 'LATEST',job_name =>'MY JOB NAME');
NOLOGFILE Do not specify a logfile
PARALLEL dbms_datapump.set_parallel(handle => h1, degree => n);
QUERY dbms_datapump.data_filter(handle => h1, name => 'SUBQUERY', value => 'WHERE col1 = 1', table_name => null, schema_name => null);
REMAP_DATA dbms_datapump.data_remap(handle => h1, name => 'COLUMN_FUNCTION', table_name => 'TAB1', column => 'COL1', function => 'UWCLASS.TESTPACKAGE.SETTESTID', schema => 'UWCLASS');
REUSE_DUMPFILES dbms_datapump.add_file(handle => l_dp_handle, filename => 'test.dmp', directory => 'CTEMP', reusefile => 1);
SAMPLE dbms_datapump.data_filter(handle => h1, name => 'SAMPLE', value => '10', table_name => NULL, schema_name => NULL);
SCHEMAS dbms_datapump.metadata_filter(l_dp_handle, 'SCHEMA_LIST', '''ALIGNE''');
SERVICE_NAME dbms_datapump.start_job(l_dp_handle,service_name=>'MY_SERVICE_NAME');
SOURCE_EDITION dbms_datapump.set_parameter(handle => h1, name => 'SOURCE_EDITION', value => 'edition name');
TABLES dbms_datapump.metadata_filter(handle => h1, name => 'NAME_EXPR', value => 'IN (''TEST'',''TEST2'')', object_type => 'TABLE');
TABLESPACES dbms_datapump.metadata_filter(handle => h1, name => 'TABLESPACE_EXPR', value => 'IN (''TBS1'',''TBS2'')', object_type => 'TABLESPACE');
TRACE dbms_datapump.set_debug(debug_flags => TO_NUMBER('1FF0300','XXXXXXXXXXXXX'), version_flag=>1);
TRANSPORTABLE dbms_datapump.set_parameter(handle => h1, name => 'TRANSPORTABLE', value => 'ALWAYS');
dbms_datapump.set_parameter(handle => h1, name => 'TRANSPORTABLE', value => 'NEVER');
TRANSPORT_FULL_CHECK dbms_datapump.set_parameter(handle => h1, name => 'TTS_FULL_CHECK', value => 0); N
dbms_datapump.set_parameter(handle => h1, name => 'TTS_FULL_CHECK', value => 1); Y
VERSION h1 := dbms_datapump.open(operation => 'EXPORT', job_mode => 'FULL', remote_link => 'DB_LINK_NAME', version => 'LATEST', job_name =>'MY JOB NAME');
h1 := dbms_datapump.open(operation => 'EXPORT', job_mode => 'FULL', remote_link => 'DB_LINK_NAME', version => 'COMPATIBLE',job_name =>'MY JOB NAME');
(no equivalent) dbms_datapump.set_parameter(handle => h1, name => 'USER_METADATA', value => 1); export create user statement in schema mode (only if datapump_exp_full_database role is granted)
dbms_datapump.set_parameter(handle => h1, name => 'USER_METADATA', value => 0); don't export create user statement in schema mode
-- Job Status Types
-- Original (10.1) job status type; superseded by ku$_JobStatus1020/1120/1210.
CREATE TYPE sys.ku$_JobStatus1010 AS OBJECT (
job_name           VARCHAR2(30),             -- Name of the job
operation          VARCHAR2(30),             -- Current operation
job_mode           VARCHAR2(30),             -- Current mode
bytes_processed    NUMBER,                   -- Bytes so far
total_bytes        NUMBER,                   -- Total bytes for job
percent_done       NUMBER,                   -- Percent done
degree             NUMBER,                   -- Of job parallelism
error_count        NUMBER,                   -- #errors so far
state              VARCHAR2(30),             -- Current job state
phase              NUMBER,                   -- Job phase
restart_count      NUMBER,                   -- #Job restarts
worker_status_list ku$_WorkerStatusList1010, -- For (non-idle) job worker processes
files              ku$_DumpFileSet1010)      -- Dump file info
/
CREATE TYPE sys.ku$_JobStatus1020 IS OBJECT (
job_name           VARCHAR2(30),  -- Name of the job
operation          VARCHAR2(30),  -- Current operation
job_mode           VARCHAR2(30),  -- Current mode
bytes_processed    NUMBER,        -- Bytes so far
total_bytes        NUMBER,        -- Total bytes for job
percent_done       NUMBER,        -- Percent done
degree             NUMBER,        -- Of job parallelism
error_count        NUMBER,        -- #errors so far
state              VARCHAR2(30),  -- Current job state
phase              NUMBER,        -- Job phase
restart_count      NUMBER,        -- #Job restarts
worker_status_list ku$_WorkerStatusList1020, -- For (non-idle) job worker processes
files ku$_DumpFileSet1010);  -- Dump file info
/

CREATE TYPE sys.ku$_JobStatus1120 IS OBJECT (
job_name           VARCHAR2(30),             -- Name of the job
operation          VARCHAR2(30),             -- Current operation
job_mode           VARCHAR2(30),             -- Current mode
bytes_processed    NUMBER,                   -- Bytes so far
total_bytes        NUMBER,                   -- Total bytes for job
percent_done       NUMBER,                   -- Percent done
degree             NUMBER,                   -- Of job parallelism
error_count        NUMBER,                   -- #errors so far
state              VARCHAR2(30),             -- Current job state
phase              NUMBER,                   -- Job phase
restart_count      NUMBER,                   -- #Job restarts
worker_status_list ku$_WorkerStatusList1120, -- For (non-idle) job worker processes
files ku$_DumpFileSet1010);                  -- Dump file info
/

-- 12.1 job status type; referenced by ku$_Status1210.
-- NOTE: the worker_status_list field declaration below had been swallowed
-- into the restart_count comment; restored as a real field.
CREATE TYPE sys.ku$_JobStatus1210 IS OBJECT
(
job_name           VARCHAR2(30),             -- Name of the job
operation          VARCHAR2(30),             -- Current operation
job_mode           VARCHAR2(30),             -- Current mode
bytes_processed    NUMBER,                   -- Bytes so far
total_bytes        NUMBER,                   -- Total bytes for job
percent_done       NUMBER,                   -- Percent done
degree             NUMBER,                   -- Of job parallelism
error_count        NUMBER,                   -- #errors so far
state              VARCHAR2(30),             -- Current job state
phase              NUMBER,                   -- Job phase
restart_count      NUMBER,                   -- #Job restarts
worker_status_list ku$_WorkerStatusList1210, -- For (non-idle) job worker processes
files              ku$_DumpFileSet1010)      -- Dump file info
/

CREATE TYPE sys.ku$_Status1120 IS OBJECT (
mask            NUMBER,             -- Status types present
wip             ku$_LogEntry1010,   -- Work in progress
job_description ku$_JobDesc1020,    -- Complete job description
job_status      ku$_JobStatus1120,  -- Detailed job status
error           ku$_LogEntry1010)   -- Multi-level context errors
/

CREATE TYPE sys.ku$_Status1210 IS OBJECT (
mask            NUMBER,            -- Status types present
wip             ku$_LogEntry1010,  -- Work in progress
job_description ku$_JobDesc1210,   -- Complete job description
job_status      ku$_JobStatus1210, -- Detailed job status
error           ku$_LogEntry1010)  -- Multi-level context errors
/
-- Log Entry & Error Types
-- One line of log or error text; collected into ku$_LogEntry1010.
CREATE TYPE sys.ku$_LogLine1010 AS OBJECT (
logLineNumber NUMBER,           -- Line number
errorNumber   NUMBER,           -- Associated error number, if any
LogText       VARCHAR2(2000) ); -- Log/error text
/

CREATE PUBLIC SYNONYM ku$_LogLine1010
FOR sys.ku$_LogLine1010;

CREATE TYPE sys.ku$_LogEntry1010 AS TABLE OF
sys.ku$_LogLine1010;
/
CREATE PUBLIC SYNONYM ku$_LogEntry1010
FOR sys.ku$_LogEntry1010;
-- Status Types
-- Original (10.1) status type returned by GET_STATUS.
CREATE TYPE sys.ku$_status1010 AS OBJECT (
mask            NUMBER,             -- Status types present
wip             ku$_LogEntry1010,   -- Work in progress
job_description ku$_JobDesc1010,    -- Complete job description
job_status      ku$_JobStatus1010,  -- Detailed job status
error           ku$_LogEntry1010);  -- Multi-level context errors
/

CREATE TYPE sys.ku$_Status1020 IS OBJECT (
mask            NUMBER,             -- Status types present
wip             ku$_LogEntry1010,   -- Work in progress
job_description ku$_JobDesc1020,    -- Complete job description
job_status      ku$_JobStatus1020,  -- Detailed job status
error           ku$_LogEntry1010)   -- Multi-level context errors
/

CREATE TYPE sys.ku$_Status1120 IS OBJECT (
mask            NUMBER,             -- Status types present
wip             ku$_LogEntry1010,   -- Work in progress
job_description ku$_JobDesc1020,    -- Complete job description
job_status      ku$_JobStatus1120,  -- Detailed job status
error           ku$_LogEntry1010)   -- Multi-level context errors
/
-- Worker Status Types
-- Original (10.1) per-worker status; superseded by the 1020/1120/1210 types.
CREATE TYPE sys.ku$_WorkerStatus1010 AS OBJECT (
worker_number     NUMBER,          -- Worker process identifier
process_name      VARCHAR2(30),    -- Worker process name
state             VARCHAR2(30),    -- Worker process state
schema            VARCHAR2(30),    -- Schema name
name              VARCHAR2(4000),  -- Object name
object_type       VARCHAR2(200),   -- Object type
partition         VARCHAR2(30),    -- Partition name
completed_objects NUMBER,          -- Completed number of objects
total_objects     NUMBER,          -- Total number of objects
completed_rows    NUMBER,          -- Number of rows completed
completed_bytes   NUMBER,          -- Number of bytes completed
percent_done      NUMBER)          -- Percent done current object
/

CREATE TYPE sys.ku$_WorkerStatusList1010 AS TABLE OF sys.ku$_WorkerStatus1010
/

CREATE TYPE sys.ku$_WorkerStatus1020 AS OBJECT (
worker_number     NUMBER,          -- Worker process identifier
process_name      VARCHAR2(30),    -- Worker process name
state             VARCHAR2(30),    -- Worker process state
schema            VARCHAR2(30),    -- Schema name
name              VARCHAR2(4000),  -- Object name
object_type       VARCHAR2(200),   -- Object type
partition         VARCHAR2(30),    -- Partition name
completed_objects NUMBER,          -- Completed number of objects
total_objects     NUMBER,          -- Total number of objects
completed_rows    NUMBER,          -- Number of rows completed
completed_bytes   NUMBER,          -- Number of bytes completed
percent_done      NUMBER,          -- Percent done current object
degree            NUMBER);         -- Degree of parallelism
/

CREATE TYPE sys.ku$_WorkerStatus1120 AS OBJECT (
worker_number     NUMBER,         -- Worker process identifier
process_name      VARCHAR2(30),   -- Worker process name
state             VARCHAR2(30),   -- Worker process state
schema            VARCHAR2(30),   -- Schema name
name              VARCHAR2(4000), -- Object name
object_type       VARCHAR2(200),  -- Object type
partition         VARCHAR2(30),   -- Partition name
completed_objects NUMBER,         -- Completed number of objects
total_objects     NUMBER,         -- Total number of objects
completed_rows    NUMBER,         -- Number of rows completed
completed_bytes   NUMBER,         -- Number of bytes completed
percent_done      NUMBER,         -- Percent done current object
degree            NUMBER,         -- Degree of parallelism
instance_id       NUMBER);        -- Instance ID where running
/

-- 12.1 per-worker status; adds instance_name and host_name relative to 1120.
-- NOTE: the trailing '/' terminator was missing, so this statement would
-- never execute in SQL*Plus; restored to match the file's convention.
CREATE TYPE sys.ku$_WorkerStatus1210 AS OBJECT
(
worker_number     NUMBER,         -- Worker process identifier
process_name      VARCHAR2(30),   -- Worker process name
state             VARCHAR2(30),   -- Worker process state
schema            VARCHAR2(30),   -- Schema name
name              VARCHAR2(4000), -- Object name
object_type       VARCHAR2(200),  -- Object type
partition         VARCHAR2(30),   -- Partition name
completed_objects NUMBER,         -- Completed number of objects
total_objects     NUMBER,         -- Total number of objects
completed_rows    NUMBER,         -- Number of rows completed
completed_bytes   NUMBER,         -- Number of bytes completed
percent_done      NUMBER,         -- Percent done current object
degree            NUMBER,         -- Degree of parallelism
instance_id       NUMBER,         -- Instance ID where running
instance_name     VARCHAR2(60),   -- Instance Name where running
host_name         VARCHAR2(64));  -- Host name where running
/
CREATE TYPE sys.ku$_WorkerStatusList1010 AS TABLE OF sys.ku$_WorkerStatus1010
/
CREATE TYPE sys.ku$_WorkerStatusList1020 AS TABLE OF sys.ku$_WorkerStatus1020
/
CREATE TYPE sys.ku$_WorkerStatusList1120 AS TABLE OF sys.ku$_WorkerStatus1120
/
CREATE TYPE sys.ku$_WorkerStatusList1210 AS TABLE OF sys.ku$_WorkerStatus1210
/
Dependencies
AMGT$DATAPUMP KU$_DUMPFILESET1010 KUPC$_JOBINFO
DBMS_AQ_SYS_IMP_INTERNAL KU$_DUMPFILE_INFO KUPC$_LOG_ENTRY
DBMS_ASSERT KU$_JOBDESC KUPC$_LOG_ERROR
DBMS_AW_EXP KU$_JOBDESC1010 KUPC$_MASTERERROR
DBMS_CSX_ADMIN KU$_JOBDESC1020 KUPC$_MASTERJOBINFO
DBMS_CSX_INT KU$_JOBDESC1210 KUPC$_MASTER_KEY_EXCHANGE
DBMS_CUBE_EXP KU$_JOBDESC1220 KUPC$_MESSAGE
DBMS_DATAPUMP_INT KU$_JOBSTATUS KUPC$_METADATA_FILTER
DBMS_DATAPUMP_UTL KU$_JOBSTATUS1010 KUPC$_METADATA_REMAP
DBMS_DM_EXP_INTERNAL KU$_JOBSTATUS1020 KUPC$_METADATA_TRANSFORM
DBMS_DM_MODEL_EXP KU$_JOBSTATUS1120 KUPC$_OPEN
DBMS_DM_MODEL_IMP KU$_JOBSTATUS1210 KUPC$_RESTART
DBMS_DST KU$_JOBSTATUS1220 KUPC$_SET_PARALLEL
DBMS_FILE_GROUP_UTL_INVOK KU$_LOGENTRY KUPC$_SET_PARAMETER
DBMS_GSM_DBADMIN KU$_PARAMVALUE KUPC$_SHADOW_KEY_EXCHANGE
DBMS_LOB KU$_PARAMVALUE1010 KUPC$_START_JOB
DBMS_LOCK KU$_PARAMVALUES KUPC$_STOP_JOB
DBMS_LOGREP_EXP KU$_PARAMVALUES1010 KUPC$_STOP_WORKER
DBMS_LOGREP_IMP KU$_STATUS KUPCC
DBMS_METADATA KU$_STATUS1010 KUPD$DATA
DBMS_METADATA_DIFF KU$_STATUS1020 KUPF$FILE
DBMS_METADATA_UTIL KU$_STATUS1120 KUPM$MCP
DBMS_NETWORK_ACL_ADMIN KU$_STATUS1210 KUPP$PROC
DBMS_OUTPUT KU$_STATUS1220 KUPU$UTILITIES
DBMS_PRIV_CAPTURE KU$_WORKERSTATUS KUPU$UTILITIES_INT
DBMS_RULE_EXP_UTL KU$_WORKERSTATUS1010 KUPUTIL
DBMS_SCHED_MAIN_EXPORT KU$_WORKERSTATUS1020 KUPV$FT
DBMS_SMB_INTERNAL KU$_WORKERSTATUS1120 KUPV$FT_INT
DBMS_SQLTCB_INTERNAL KU$_WORKERSTATUS1210 KUPW$WORKER
DBMS_STATS KU$_WORKERSTATUS1220 LBAC_EXP
DBMS_STREAMS_DATAPUMP KU$_WORKERSTATUSLIST LBAC_SYSDBA
DBMS_STREAMS_DATAPUMP_UTIL KU$_WORKERSTATUSLIST1010 LOGSTDBY_INTERNAL
DBMS_STREAMS_TABLESPACE_ADM KU$_WORKERSTATUSLIST1020 LT_EXPORT_PKG
DBMS_STREAMS_TBS_INT_INVOK KU$_WORKERSTATUSLIST1120 OLS$DATAPUMP
DBMS_SWRF_INTERNAL KU$_WORKERSTATUSLIST1210 OLS_UTIL_WRAPPER
DBMS_SYS_ERROR KU$_WORKERSTATUSLIST1220 OWM_VSCRIPT_PKG
DBMS_UTILITY KUPC$QUEUE PLITBLM
DBMS_WRR_INTERNAL KUPC$QUEUE_INT PSTDY_DATAPUMP_SUPPORT
DBMS_XDBUTIL_INT KUPC$_ADD_DEVICE SA_COMPONENTS
DMP_SEC KUPC$_ADD_FILE TSDP$DATAPUMP
DRIIMP KUPC$_API_ACK UTL_XML
KU$_DUMPFILE KUPC$_DATA_FILTER XIMETADATA_PKG
KU$_DUMPFILE1010 KUPC$_DATA_REMAP XMLTYPE
KU$_DUMPFILESET    
Documented Yes: Packages and Types Reference
Exceptions
Error Code Reason
ORA-39001 INVALID_ARGVAL: An invalid value was supplied for an input parameter
ORA-39002 INVALID_OPERATION: Cannot be executed because of inconsistencies between the API and the job
ORA-39004 INVALID_STATE: The state of the job precludes the execution of the API
ORA-39005 INCONSISTENT_ARGS: Inconsistent arguments 
ORA-39006 INTERNAL_ERROR: Internal datapump exception
ORA-31623 INVALID_HANDLE: Incorrect handle specified for the job
ORA-31626 NO_SUCH_JOB: An invalid reference to a job which is no longer executing
ORA-31627 SUCCESS_WITH_INFO: User specified job parameters that yielded informational messages
ORA-31631 PRIVILEGE_ERROR: The necessary privileges are not available for operations
ORA-31634 JOB_EXISTS: Job creation or restart failed due to duplicate name
ORA-39211 NO_DUMPFILE_INFO: User specified an invalid or inaccessible file
ORA-39327 WARNING_DV_NOENCRYPT: Database Vault data is being stored unencrypted in dump file set
Filters
Name Object Type Meaning
INCLUDE_NAME_EXPR
and
EXCLUDE_NAME_EXPR
Named objects Defines which object names are included in (INCLUDE_NAME_EXPR), or excluded from (EXCLUDE_NAME_EXPR), the job. You use the object_type parameter to limit the filter to a particular object type.

For Table mode, identifies which tables are to be processed.
SCHEMA_EXPR Schema objects Restricts the job to objects whose owning schema name is satisfied by the expression.

For Table mode, only a single SCHEMA_EXPR filter is supported. If specified, it must only specify a single schema (for example, 'IN (''SCOTT'')').

For Schema mode, identifies which users are to be processed.
TABLESPACE_EXPR Table, Cluster, Index, Rollback Segment Restricts the job to objects stored in a tablespace whose name is satisfied by the expression.

For Tablespace mode, identifies which tablespaces are to be processed. If a partition of an object is stored in the tablespace, the entire object is added to the job.

For Transportable mode, identifies which tablespaces are to be processed. If a table has a single partition in the tablespace set, all partitions must be in the tablespace set. An index is not included within the tablespace set unless all of its partitions are in the tablespace set. A domain index is not included in the tablespace set unless all of its secondary objects are included in the tablespace set.
INCLUDE_PATH_EXPR and EXCLUDE_PATH_EXPR All Defines which object paths are included in, or excluded from, the job. You use these filters to select only certain object types from the database or dump file set. Objects of paths satisfying the condition are included (INCLUDE_PATH_EXPR) or excluded (EXCLUDE_PATH_EXPR) from the operation. The object_path parameter is not supported for these filters.
First Available 10.1
Remaps
Name Data Type Object Type Meaning
REMAP_DATAFILE Text Library, Tablespace, Directory Any datafile reference in the job that matches the object_type parameter and referenced the old_value datafile will be redefined to use the value datafile.
REMAP_SCHEMA Text Schema Objects Any schema object in the job that matches the object_type parameter and was located in the old_value schema will be moved to the value schema.
REMAP_TABLESPACE Text Table, Index, Rollback Segment, Materialized View, Materialized View Log, Tablespace Any storage segment in the job that matches the object_type parameter and was located in the old_value tablespace will be relocated to the value tablespace.
Security Model Owned by SYS with EXECUTE granted to PUBLIC
System Privileges
CREATE DIRECTORY EXP_FULL_DATABASE IMP_FULL_DATABASE
Source {ORACLE_HOME}/rdbms/admin/dbmsdp.sql
{ORACLE_HOME}/rdbms/admin/dbmspump.sql
Subprograms
Transforms
Name Data Type Object Type Description
SEGMENT_ATTRIBUTES Number Table, Index If nonzero (TRUE), emit storage segment parameters. Defaults to 1
STORAGE Number Table If nonzero (TRUE), emit storage clause. (Ignored if SEGMENT_ATTRIBUTES is zero.) Defaults to nonzero (TRUE)
 
ADD_DEVICE
Adds a sequential device to the dump file set for Export, Import, or Sql_file operations dbms_datapump.add_device(
handle      IN NUMBER,                  -- job handle
device_name IN VARCHAR2,                -- name of device being added
volume_size IN VARCHAR2 DEFAULT NULL);  -- device storage capacity
TBD
 
ADD_FILE
Adds files to the dump file set for an Export, Import, or SQL_FILE operation or specifies the log file or the output file for a SQL_FILE operation dbms_datapump.add_file(
handle    IN NUMBER,
filename  IN VARCHAR2,
directory IN VARCHAR2 DEFAULT NULL,
filesize  IN VARCHAR2 DEFAULT NULL,
filetype  IN NUMBER   DEFAULT dbms_datapump.ku$_file_type_dump_file,
reusefile IN NUMBER   DEFAULT NULL);
See Export Demo Below
 
ATTACH
Used to gain access to a Data Pump job that is in the Defining, Executing, Idling, or Stopped state dbms_datapump.attach(
job_name  IN VARCHAR2 DEFAULT NULL,
job_owner IN VARCHAR2 DEFAULT NULL)
RETURN NUMBER;
See LOG_ENTRY Demo Below
 
CLIENT_LOB_APPEND
CLOB helper routine dbms_datapump.client_lob_append(
value    IN VARCHAR2,
position IN NUMBER
as_is    IN NUMBER DEFAULT 0);
TBD
 
CLIENT_LOB_DELETE
CLOB helper routine dbms_datapump.client_lob_delete;
TBD
 
CLIENT_LOB_GET
CLOB helper routine dbms_datapump.client_lob_get RETURN CLOB;
TBD
 
CREATE_JOB_VIEW
Create view into master table for a job

Overload 1
dbms_datapump.create_job_view(
job_schema IN VARCHAR2,
job_name   IN VARCHAR2,
view_name  IN VARCHAR2,
view_type  IN VARCHAR2 DEFAULT dbms_datapump.ku$_job_view_all);
TBD
Overload 2 dbms_datapump.create_job_view(
handle    IN NUMBER,
view_name IN VARCHAR2,
view_type IN VARCHAR2 DEFAULT dbms_datapump.ku$_job_view_all);
TBD
 
DATAPUMP_JOB
Returns TRUE if the code executing is a datapump job: Otherwise FALSE dbms_datapump.datapump_job RETURN BOOLEAN;
set serveroutput on

-- Demo: prints 'Yes' when the current session is executing inside a
-- Data Pump job (DATAPUMP_JOB returns BOOLEAN), otherwise 'No'.
BEGIN
  IF dbms_datapump.datapump_job THEN
    dbms_output.put_line('Yes');
  ELSE
    dbms_output.put_line('No');
  END IF;
END;
/
 
DATA_FILTER
Specifies restrictions on the rows that are to be retrieved

Overload 1
dbms_datapump.data_filter (
handle      IN NUMBER,
name        IN VARCHAR2,               -- filter name
value       IN NUMBER,
table_name  IN VARCHAR2 DEFAULT NULL,  -- if not specified = all
schema_name IN VARCHAR2 DEFAULT NULL);
TBD
Overload 2 dbms_datapump.data_filter(
handle      IN NUMBER,
name        IN VARCHAR2,
value       IN CLOB,
table_name  IN VARCHAR2 DEFAULT NULL,
schema_name IN VARCHAR2 DEFAULT NULL);
TBD
Overload 3 dbms_datapump.data_filter(
handle      IN NUMBER,
name        IN VARCHAR2,
value       IN VARCHAR2,
table_name  IN VARCHAR2 DEFAULT NULL,
schema_name IN VARCHAR2 DEFAULT NULL);
TBD
 
DATA_REMAP
Modify the values of data in user tables dbms_datapump.data_remap(
handle      IN NUMBER,      -- job handle
name        IN VARCHAR2,
table_name  IN VARCHAR2,
column      IN VARCHAR2,
function    IN VARCHAR2,
schema      IN VARCHAR2 DEFAULT NULL,
remap_flags IN NUMBER   DEFAULT 0); -- alt. value dbms_datapump.ku$_data_remap_with_rowid
TBD
 
DETACH
Specifies that the user has no further interest in using the handle dbms_datapump.detach(handle IN NUMBER);
See Export Demo Below
 
DH_KEY_EXCHANGE
Undocumented dbms_datapump.dh_key_exchange(
handle     IN  NUMBER,
source_key IN  VARCHAR2,
target_key OUT VARCHAR2,
digest     OUT VARCHAR2);
TBD
 
END_SW_UPDATE
  dbms_datapump.
TBD
 
ESTABLISH_REMOTE_CONTEXT
Establish remote Data Pump job context dbms_datapump.establish_remote_context(worker_id IN NUMBER, remote_link IN VARCHAR2);
TBD
 
GET_DUMPFILE_INFO
Retrieves information about a specified dump file dbms_datapump.get_dumpfile_info(
file_name  IN  VARCHAR2,
directory  IN  VARCHAR2,
info_table OUT dbms_datapump.ku$_dumpfile_info,
filetype   OUT NUMBER);
TBD
 
GET_OBJECT_INFO
Returns information about an object from the master table dbms_datapump.get_object_info(
object_owner      IN  VARCHAR2,
object_name       IN  VARCHAR2,
object_type       IN  VARCHAR2,
processing_status OUT VARCHAR2,
processing_state  OUT VARCHAR2);
SQL> DECLARE
  2   pstatus VARCHAR2(30);
  3   pstate VARCHAR2(30);
  4  BEGIN
  5    dbms_datapump.get_object_info('UWCLASS', 'SERVERS', 'TABLE', pstatus, pstate);
  6    dbms_output.put_line(pstatus);
  7    dbms_output.put_line(pstate);
  8  END;
  9 /
DECLARE
*
ERROR at line 1:
ORA-31623: a job is not attached to this session via the specified handle
ORA-06512: at "SYS.KUPV$FT_INT", line 1872
ORA-06512: at "SYS.DBMS_SYS_ERROR", line 79
ORA-06512: at "SYS.KUPV$FT_INT", line 1805
ORA-06512: at "SYS.DBMS_DATAPUMP", line 2266
ORA-06512: at "SYS.DBMS_DATAPUMP", line 7526
ORA-06512: at line 5
 
GET_STATUS
Monitors the status of a job or waits for the completion of a job or for more details on API errors

Overload 1
dbms_datapump.get_status(
handle  IN NUMBER,
mask    IN INTEGER,
timeout IN NUMBER DEFAULT NULL)
RETURN dbms_datapump.ku$_status;
TBD
Overload 2 dbms_datapump.get_status(
handle    IN  NUMBER,
mask      IN  INTEGER,
timeout   IN  NUMBER DEFAULT NULL,
job_state OUT VARCHAR2,
status    OUT ku$_status1010);
TBD
Overload 3 dbms_datapump.get_status(
handle     IN  NUMBER,
mask       IN  INTEGER,
timeout    IN  NUMBER DEFAULT NULL,
job_state  OUT VARCHAR2,
status     OUT ku$_status1020);
See Export Demo Below
Overload 4 dbms_datapump.get_status(
handle     IN  NUMBER,
mask       IN  INTEGER,
timeout    IN  NUMBER DEFAULT NULL,
job_state  OUT VARCHAR2,
status     OUT ku$_status1120);
See Export Demo Below
Overload 5 dbms_datapump.get_status(
handle    IN  NUMBER,
mask      IN  INTEGER,
timeout   IN  NUMBER DEFAULT NULL,
job_state OUT VARCHAR2,
status    OUT ku$_Status1210);
TBD
Overload 6 dbms_datapump.get_status(
handle    IN  NUMBER,
mask      IN  INTEGER,
timeout   IN  NUMBER DEFAULT NULL,
job_state OUT VARCHAR2,
status    OUT ku$_Status1220);
TBD
 
GET_STATUS_VERSION
Determine ku$_Status object version to use for network operations dbms_datapump.get_status_version(version IN NUMBER) RETURN NUMBER;
TBD
 
HAS_PRIVS
Returns TRUE if the privilege has been granted: Otherwise FALSE dbms_datapump.has_privs(oper IN VARCHAR2) RETURN BOOLEAN;
set serveroutput on

BEGIN
  IF dbms_datapump.has_privs('EXPORT') THEN
    dbms_output.put_line('Yes');
  ELSE
    dbms_output.put_line('No');
  END IF;
END;
/
 
LOG_ENTRY
Inserts a message into the log file dbms_datapump.log_entry (
handle        IN NUMBER,
message       IN VARCHAR2,
log_file_only IN NUMBER DEFAULT 0);
DECLARE
 dph NUMBER;
BEGIN
  dph := dbms_datapump.attach('EXAMPLE5', USER);
  dbms_output.put_line(dph);

  dbms_datapump.log_entry(dph, 'Log entry from DataPump API');
  dbms_datapump.detach(dph);
END;
/
 
LOG_ERROR
Undocumented dbms_datapump.log_error(
handle        IN NUMBER,
message       IN VARCHAR2,
error_number  IN NUMBER DEFAULT 0,
fatal_error   IN NUMBER DEFAULT 0,
log_file_only IN NUMBER DEFAULT 0);
TBD
 
METADATA_FILTER
Creates a filter that restricts the items that are included in a job

Overload 1
dbms_datapump.metadata_filter (
handle      IN NUMBER,
name        IN VARCHAR2,
value       IN VARCHAR2,
object_path IN VARCHAR2 DEFAULT NULL,
object_type IN VARCHAR2 DEFAULT NULL);


Transform names and values (valid for both overloads)
Filter Name Transform Values Data Type Applicable Object Types
NAME_EXPR A single named object TEXT All
NAME_LIST A comma-separated list of objects TEXT All
SCHEMA_EXPR A single named schema TEXT Schema
SCHEMA_LIST A comma-separated list of schemas TEXT Schemas
TABLESPACE_EXPR A single named tablespace TEXT Table, Cluster, Index
TABLESPACE_LIST A comma-separated list of tablespaces TEXT Table, Cluster, Index
INCLUDE_PATH_EXPR A single object path expression TEXT All
INCLUDE_PATH_LIST A comma-separated list of object paths TEXT All
EXCLUDE_PATH_EXPR A single object path expression TEXT All
EXCLUDE_PATH_LIST A comma-separated list of object paths TEXT All
EXCLUDE_TABLES A comma-separated list of tables to be excluded (not exported) TEXT Table Export
TABLES_AS_VIEWS A comma-separated list of views to be exported as tables TEXT Table Export
See Export Demo Below
Overload 2 dbms_datapump.metadata_filter (
handle      IN NUMBER,
name        IN VARCHAR2,
value       IN CLOB,
object_path IN VARCHAR2 DEFAULT NULL,
object_type IN VARCHAR2 DEFAULT NULL);
TBD
 
METADATA_REMAP
Specifies a remapping to be applied to objects as they are processed in the specified job dbms_datapump.metadata_remap (
handle      IN NUMBER,
name        IN VARCHAR2,
old_value   IN VARCHAR2,
value       IN VARCHAR2,
object_type IN VARCHAR2 DEFAULT NULL);
TBD
 
METADATA_TRANSFORM
Specifies transformations to be applied to objects as they are processed in the specified job

Overload 1
dbms_datapump.metadata_transform (
handle      IN NUMBER,
name        IN VARCHAR2,
value       IN VARCHAR2,
object_type IN VARCHAR2 DEFAULT NULL);


Transform names and values (valid for both overloads)
Transform Name Transform Values Data Type Applicable Object Types
DISABLE_ARCHIVE_LOGGING N or Y: The default is N TEXT TABLE, INDEX
LOB_STORAGE BASICFILE, DEFAULT, NO_CHANGE, SECUREFILE TEXT TABLE
OID 0 or 1: The default is 1 NUMBER TYPE TABLE
PCTSPACE Default is 100 NUMBER TABLE, INDEX, TABLESPACE
SEGMENT_ATTRIBUTES If non-zero drop storage segment params: Default is 1 NUMBER TABLE, INDEX
SEGMENT_CREATION If non-zero will include the segment creation clause: Default is 1 NUMBER TABLE
STORAGE If non-zero drops the storage clause: Default is 1 NUMBER TABLE
TABLE_COMPRESSION_CLAUSE Specify NONE to not specify a compression clause TEXT TABLE

The list of object types can be found in the DATABASE_EXPORT_OBJECTS, SCHEMA_EXPORT_OBJECTS, TABLE_EXPORT_OBJECTS, and the TABLESPACE_EXPORT_OBJECTS views.
TBD
Overload 2 dbms_datapump.metadata_transform (
handle      IN NUMBER,
name        IN VARCHAR2,
value       IN NUMBER,
object_type IN VARCHAR2 DEFAULT NULL);


The list of object types can be found in the DATABASE_EXPORT_OBJECTS, SCHEMA_EXPORT_OBJECTS, TABLE_EXPORT_OBJECTS, and the TABLESPACE_EXPORT_OBJECTS views.
TBD
 
OPEN
Declare a new job using the Data Pump API. The handle that is returned is used as a parameter for calls to all other procedures except ATTACH. dbms_datapump.open (
operation   IN VARCHAR2,
job_mode    IN VARCHAR2,
remote_link IN VARCHAR2 DEFAULT NULL,
job_name    IN VARCHAR2 DEFAULT NULL,
version     IN VARCHAR2 DEFAULT 'COMPATIBLE',
compression IN NUMBER DEFAULT dbms_datapump.ku$_compress_metadata)
RETURN NUMBER;


Job Mode Description
FULL Full database excluding the SYS, XDB,ORDSYS, MDSYS, CTXSYS, ORDPLUGINS, and LBACSYS schemas
SCHEMA Operates on a set of selected schemas. Defaults to current user. All objects in the selected schemas are processed. Users cannot specify schemas noted above
TABLE Operates on a set of selected tables. Defaults to all of the tables in the current user's schema. Only tables and their dependent objects are processed.
TABLESPACE Operates on a set of selected tablespaces. No defaulting is performed. Tables that have storage in the specified tablespaces are processed in the same manner as in Table mode.
TRANSPORTABLE Operates on metadata for tables (and their dependent objects) within a set of selected tablespaces to perform a transportable tablespace export/import.
See Export Demo Below
 
SETUP_REMOTE_CONTEXT
Sets up a remote Data Pump job context dbms_datapump.setup_remote_context(
user_name      IN VARCHAR2,
job_name       IN VARCHAR2,
version        IN NUMBER,
status_xml     IN VARCHAR2,
status_xml_len IN NUMBER,
more           IN NUMBER);
TBD
 
SET_DEBUG
Sets the internal debug switch

Overload 1
dbms_datapump.set_debug(
on_off  IN NUMBER,
ip_addr IN VARCHAR2 DEFAULT NULL);
TBD
Overload 2 dbms_datapump.set_debug(
debug_flags  IN BINARY_INTEGER
,
version_flag IN BINARY_INTEGER);
TBD
 
SET_PARALLEL
Adjusts the degree of parallelism within a job dbms_datapump.set_parallel (handle IN NUMBER, degree IN NUMBER);
See Export Demo Below
 
SET_PARAMETER
Specify job-processing options

Overload 1
dbms_datapump.set_parameter(
handle IN NUMBER,
name   IN VARCHAR2,
value  IN VARCHAR2);
TBD
Overload 2 dbms_datapump.set_parameter (
handle IN NUMBER,
name   IN VARCHAR2,
value  IN NUMBER);
TBD
 
SET_REMOTE_WORKER
Sets worker for remote job context dbms_datapump.set_remote_worker(worker IN NUMBER);
TBD
 
START_JOB
Begins or resumes job execution dbms_datapump.start_job (
handle       IN NUMBER,
skip_current IN NUMBER DEFAULT 0,
abort_step   IN NUMBER DEFAULT 0,
cluster_ok   IN NUMBER DEFAULT 1,
service_name IN VARCHAR2 DEFAULT NULL);
See Export Demo Below
 
START_SW_UPDATE
  dbms_datapump.
TBD
 
STOP_JOB
Terminates a job, but optionally, preserves the state of the job dbms_datapump.stop_job (
handle      IN NUMBER,
immediate   IN NUMBER DEFAULT 0,
keep_master IN NUMBER DEFAULT NULL,
delay       IN NUMBER DEFAULT 60);
See Export Demo Below
 
STOP_WORKER
Kills a hung worker process dbms_datapump.stop_worker (
handle       IN NUMBER,
worker_id    IN NUMBER,
skip_current IN NUMBER DEFAULT 0);
TBD
 
TEST_FUNCTIONALITY
Undocumented dbms_datapump.test_functionality(
bug_number   IN  NUMBER, -- bug number to check
patch_exists OUT NUMBER);
TBD
 
TEST_REMOTE_CONTEXT1010
Test if remote Data Pump job context is version 10.1 dbms_datapump.test_remote_context1010;
TBD
 
TEST_REMOTE_CONTEXT1020
Test if remote Data Pump job context is version 10.2 dbms_datapump.test_remote_context1020;
TBD
 
TEST_REMOTE_CONTEXT1120
Test if remote Data Pump job context is version 11.2 dbms_datapump.test_remote_context1120;
TBD
 
TEST_REMOTE_CONTEXT1210
Test if remote Data Pump job context is version 12.1 dbms_datapump.test_remote_context1210;
TBD
 
TRACE_ENTRY
Writes a trace message to the current process's trace file if debugging is enabled dbms_datapump.trace_entry(
facility IN VARCHAR2,
msg      IN VARCHAR2);
TBD
 
WAIT_FOR_JOB
Runs a job until it either completes normally or stops for some other reason dbms_datapump.wait_for_job(
handle    IN  NUMBER,
job_state OUT VARCHAR2);
TBD
 
DataPump Demos
DataPump Export conn / as sysdba

set linesize 121
col owner format a20
col directory_name format a15
col directory_path format a60

SELECT *
FROM dba_directories;

HOST mkdir c:\expimp
HOST mkdir c:\expimp\logs

EXIT

CREATE OR REPLACE DIRECTORY expimp AS 'c:\expimp';

CREATE OR REPLACE DIRECTORY expimp_log AS 'c:\expimp\logs';

GRANT READ,WRITE ON DIRECTORY expimp TO system;

GRANT READ,WRITE ON DIRECTORY expimp_log TO system;

HOST del c:\expimp\*.dmp

HOST del c:\expimp\logs\*.log

set serveroutput on

DECLARE
 ind       NUMBER;        -- loop index
 dph       NUMBER;        -- job handle
 pct_done  NUMBER;        -- percentage complete
 job_state VARCHAR2(30);  -- track job state
 le        ku$_LogEntry;  -- WIP and error messages
 js        ku$_JobStatus; -- job status from get_status
 jd        ku$_JobDesc;   -- job description from get_status
 sts       ku$_Status;    -- status object returned by get_status
BEGIN
  -- create job
  dph := dbms_datapump.open('EXPORT','SCHEMA',NULL,'EXAMPLE5','LATEST');

  -- specify dump file
  dbms_datapump.add_file(dph, 'example5.dmp', 'EXPIMP',
  filetype => dbms_datapump.ku$_file_type_dump_file);

  -- specify log file
  dbms_datapump.add_file(dph, 'example5.log', 'EXPIMP_LOG',
  filetype => dbms_datapump.ku$_file_type_log_file);

  -- specify export schema
  dbms_datapump.metadata_filter(dph, 'SCHEMA_EXPR', 'IN (''HR'')');

  -- set parallelism
  dbms_datapump.set_parallel(dph, 2);

  -- start job
  dbms_datapump.start_job(dph);

  -- monitor job
  pct_done := 0;
  job_state := 'UNDEFINED';
  WHILE (job_state != 'COMPLETED') AND (job_state != 'STOPPED') LOOP
    dbms_datapump.get_status(dph, dbms_datapump.ku$_status_job_error +
    dbms_datapump.ku$_status_job_status + dbms_datapump.ku$_status_wip, -1, job_state, sts);

    js := sts.job_status;

    -- if the percentage done changed, display the new value
    IF js.percent_done != pct_done THEN
      dbms_output.put_line('*** Job percent done = ' || TO_CHAR(js.percent_done));
      pct_done := js.percent_done;
    END IF;

    -- display any work-in-progress (WIP) or error messages received from the job
    IF (BITAND(sts.mask, dbms_datapump.ku$_status_wip) != 0) THEN
      le := sts.wip;
    ELSE
      IF (BITAND(sts.mask, dbms_datapump.ku$_status_job_error) != 0) THEN
        le := sts.error;
      ELSE
        le := NULL;
      END IF;
    END IF;

    IF le IS NOT NULL THEN
      ind := le.FIRST;
      WHILE ind IS NOT NULL LOOP
        dbms_output.put_line(le(ind).LogText);
        ind := le.NEXT(ind);
      END LOOP;
    END IF;
  END LOOP;

  -- indicate that the job finished and detach from it
  dbms_output.put_line('Job has completed');
  dbms_output.put_line('Final job state = ' || job_state);
  dbms_datapump.detach(dph);
EXCEPTION
  WHEN OTHERS THEN
    dbms_datapump.stop_job(dph);
END;
/
set serveroutput on

DECLARE
 l_dp_handle      NUMBER;
 l_sts            KU$_STATUS;
BEGIN
  l_dp_handle := dbms_datapump.open(operation => 'EXPORT',
                                    job_mode => 'SCHEMA',
                                    remote_link => NULL,
                                    job_name => 'EMP_EXPORT',
                                    version => 'LATEST');

  dbms_datapump.add_file(handle => l_dp_handle,
                         filename => 'SCOTT.dmp',
                         directory => 'TEST_DIR');

  dbms_datapump.metadata_filter(handle => l_dp_handle,
                                name => 'SCHEMA_EXPR',
                                value => '= ''SCOTT''');

  dbms_datapump.start_job(l_dp_handle);

  dbms_datapump.detach(l_dp_handle);
END;
/

-- check job status
system@db10g> SELECT * FROM dba_datapump_jobs;
DataPump Import conn / as sysdba

set serveroutput on

DECLARE
 dph NUMBER;
BEGIN
  dph := dbms_datapump.open(operation => 'IMPORT',
                            job_mode => 'TABLE',
                            job_name => 'EMP_IMPORT');

  dbms_datapump.add_file(handle => dph,
                         filename => 'EXPIMP%U.DMP',
                         directory => 'EXPIMP', filetype=>1);

  dbms_datapump.add_file(handle => dph,
                         filename => 'EXPIMP.LOG',
                         directory => 'EXPIMP_LOG', filetype=>3);

  dbms_datapump.set_parameter(handle => dph,
                              name => 'TABLE_EXISTS_ACTION',
                              value =>'REPLACE');

  dbms_datapump.start_job(dph);

  dbms_datapump.detach(dph);
EXCEPTION
  WHEN OTHERS THEN
    dbms_output.put_line('Error:' || sqlerrm || ' on Job-ID:' || dph);
END;
/

Related Topics
AMGT$DATAPUMP
Built-in Functions
Built-in Packages
Database Security
DataPump Executable
DBMS_DATAPUMP_INT
DBMS_DATAPUMP_UTL
DBMS_MASTER_TABLE
DBMS_PSWMG_IMPORT
Export
Import
KUPCC
KUPD$DATA_INT
KUPW$WORKER
PSTDY_DATAPUMP_SUPPORT
Transportable Tablespaces
TSDP_DATAPUMP
What's New In 21c
What's New In 23c

Morgan's Library Page Footer
This site is maintained by Dan Morgan. Last Updated: This site is protected by copyright and trademark laws under U.S. and International law. © 1998-2023 Daniel A. Morgan All Rights Reserved
  DBSecWorx