ACE Director Alum Daniel Morgan, founder of Morgan's Library, is scheduling
complimentary technical Workshops on Database Security for the first 30
Oracle Database customers located anywhere in North America, EMEA, LATAM, or
APAC that send an email to
asra_us@oracle.com. Request a Workshop for
your organization today.
Purpose
PL/SQL API to DataPump Import, Export, and Transportable Tablespaces
AUTHID
CURRENT_USER
Constants
Name
Data Type
Value
Bitmap Definitions For TTS_CLOSURE_CHECK Parameter
KU$_TTS_CLOSURE_CHECK_ON
BINARY_INTEGER
1
KU$_TTS_CLOSURE_CHECK_OFF
BINARY_INTEGER
2
KU$_TTS_CLOSURE_CHECK_FULL
BINARY_INTEGER
4
KU$_TTS_CLOSURE_CHECK_TEST
BINARY_INTEGER
8
Bitmap Definitions For Transportable Parameter
KU$_TTS_NEVER
BINARY_INTEGER
1
KU$_TTS_ALWAYS
BINARY_INTEGER
2
KU$_TTS_KEEP_READ_ONLY
BINARY_INTEGER
4
KU$_TTS_NO_BITMAP_REBLD
BINARY_INTEGER
8
Dumpfile Types
KU$_DUMPFILE_TYPE_DISK
BINARY_INTEGER
0
KU$_DUMPFILE_TYPE_PIPE
BINARY_INTEGER
1
KU$_DUMPFILE_TYPE_TAPE
BINARY_INTEGER
2
KU$_DUMPFILE_TYPE_TEMPLATE
BINARY_INTEGER
3
KU$_DUMPFILE_TYPE_URIDISK
BINARY_INTEGER
4
KU$_FILE_TYPE_DUMP_FILE
BINARY_INTEGER
1
KU$_FILE_TYPE_BAD_FILE
BINARY_INTEGER
2
KU$_FILE_TYPE_LOG_FILE
BINARY_INTEGER
3
KU$_FILE_TYPE_SQL_FILE
BINARY_INTEGER
4
KU$_FILE_TYPE_URIDUMP_FILE
BINARY_INTEGER
5
KU$_JOB_COMPLETE
NUMBER
1
KU$_JOB_COMPLETE_ERRORS
NUMBER
2
KU$_JOB_STOPPED
NUMBER
3
KU$_JOB_ABORTED
NUMBER
4
KU$_JOB_VIEW_ALL
NUMBER
0
KU$_JOB_TTS_TABLESPACES
NUMBER
1
KU$_JOB_VIEW_ENCCOL_TABLES
NUMBER
2
Mask Bit Definitions
KU$_STATUS_WIP
BINARY_INTEGER
1
KU$_STATUS_JOB_DESC
BINARY_INTEGER
2
KU$_STATUS_JOB_STATUS
BINARY_INTEGER
4
KU$_STATUS_JOB_ERROR
BINARY_INTEGER
8
KU$_STATUS_VERSION_1
NUMBER
1
KU$_STATUS_VERSION_2
NUMBER
2
KU$_STATUS_VERSION_3
NUMBER
3
KU$_STATUS_VERSION_4
NUMBER
4
KU$_STATUS_VERSION_5
NUMBER
5
KU$_STATUS_VERSION
NUMBER
KU$_STATUS_VERSION_5
KU$_DATAOPT_SKIP_CONST_ERR
NUMBER
1
KU$_DATAOPT_XMLTYPE_CLOB
NUMBER
2
KU$_DATAOPT_NOTYPE_EVOL
NUMBER
4
KU$_DATAOPT_DISABL_APPEND_HINT
NUMBER
8
KU$_DATAOPT_REJECT_ROWS_REPCHR
NUMBER
16
KU$_DATAOPT_ENABLE_NET_COMP
NUMBER
32
KU$_DATAOPT_GRP_PART_TAB
NUMBER
64
KU$_DATAOPT_TRUST_EXIST_TB_PAR
NUMBER
128
KU$_DATAOPT_VALIDATE_TBL_DATA
NUMBER
256
KU$_DATAOPT_VERIFY_STREAM_FORM
NUMBER
512
KU$_DATAOPT_CONT_LD_ON_FMT_ERR
NUMBER
1024
KU$_ICRPP_PREIMPORT
NUMBER
0
KU$_ICRPP_FINAL
NUMBER
1
KU$_ICRPP_EARLY
NUMBER
2
KU$_ICRFLAGS_IS_EXPR
NUMBER
1
KU$_ICRFLAGS_EARLY_IMPORT
NUMBER
2
KU$_ICRFLAGS_GET_DEPENDENTS
NUMBER
4
KU$_ICRFLAGS_EXCLUDE
NUMBER
8
KU$_ICRFLAGS_XDB_NO_TTS
NUMBER
16
KU$_COMPRESS_NONE
NUMBER
1
KU$_COMPRESS_METADATA
NUMBER
2
Item Codes for entry in a dump file info table (of type ku$_dumpfile_info)
KU$_DFHDR_FILE_VERSION
NUMBER
1
KU$_DFHDR_MASTER_PRESENT
NUMBER
2
KU$_DFHDR_GUID
NUMBER
3
KU$_DFHDR_FILE_NUMBER
NUMBER
4
KU$_DFHDR_CHARSET_ID
NUMBER
5
KU$_DFHDR_CREATION_DATE
NUMBER
6
KU$_DFHDR_FLAGS
NUMBER
7
KU$_DFHDR_JOB_NAME
NUMBER
8
KU$_DFHDR_PLATFORM
NUMBER
9
KU$_DFHDR_INSTANCE
NUMBER
10
KU$_DFHDR_LANGUAGE
NUMBER
11
KU$_DFHDR_BLOCKSIZE
NUMBER
12
KU$_DFHDR_DIRPATH
NUMBER
13
KU$_DFHDR_METADATA_COMPRESSED
NUMBER
14
KU$_DFHDR_DB_VERSION
NUMBER
15
KU$_DFHDR_MASTER_PIECE_COUNT
NUMBER
16
KU$_DFHDR_MASTER_PIECE_NUMBER
NUMBER
17
KU$_DFHDR_DATA_COMPRESSED
NUMBER
18
KU$_DFHDR_METADATA_ENCRYPTED
NUMBER
19
KU$_DFHDR_DATA_ENCRYPTED
NUMBER
20
KU$_DFHDR_COLUMNS_ENCRYPTED
NUMBER
21
Miscellaneous
KU$_DFHDR_HADOOP_TRAILERS
NUMBER
24
KU$_DFHDR_TTSTM_DUMP
NUMBER
25
KU$_DFHDR_MAX_ITEM_CODE
NUMBER
25
Obsolete Modes: Deprecated
KU$_DFHDR_ENCPWD_MODE
NUMBER
22
KU$_DFHDR_ENCPWD_MODE_UNKNOWN
NUMBER
1
KU$_DFHDR_ENCPWD_MODE_NONE
NUMBER
2
KU$_DFHDR_ENCPWD_MODE_PASSWORD
NUMBER
3
KU$_DFHDR_ENCPWD_MODE_DUAL
NUMBER
4
KU$_DFHDR_ENCPWD_MODE_TRANS
NUMBER
5
Modes Returnable for item code KU$_DFHDR_ENCRYPTION_MODE
KU$_DFHDR_ENCRYPTION_MODE
NUMBER
22
KU$_DFHDR_ENCMODE_UNKNOWN
NUMBER
1
KU$_DFHDR_ENCMODE_NONE
NUMBER
2
KU$_DFHDR_ENCMODE_PASSWORD
NUMBER
3
KU$_DFHDR_ENCMODE_DUAL
NUMBER
4
KU$_DFHDR_ENCMODE_TRANS
NUMBER
5
Modes Returnable for item code KU$_DFHDR_COMPRESSION_ALG
KU$_DFHDR_COMPRESSION_ALG
NUMBER
23
KU$_DFHDR_CMPALG_UNKNOWN
NUMBER
1
KU$_DFHDR_CMPALG_NONE
NUMBER
2
KU$_DFHDR_CMPALG_BASIC
NUMBER
3
KU$_DFHDR_CMPALG_LOW
NUMBER
4
KU$_DFHDR_CMPALG_MEDIUM
NUMBER
5
KU$_DFHDR_CMPALG_HIGH
NUMBER
6
Remap Flag
KU$_DATA_REMAP_WITH_ROWID
NUMBER
1
Job Description Data Types
-- Holds one Data Pump job parameter: its name, the operation it applies to,
-- its declared type/length, and the value (numeric or text slot is used
-- depending on param_type).
CREATE TYPE sys.ku$_ParamValue1010 AS OBJECT (
param_name VARCHAR2(30), -- parameter name
param_op VARCHAR2(30), -- operation the parameter applies to
param_type VARCHAR2(30), -- datatype of the value
param_length NUMBER, -- length of the value
param_value_n NUMBER, -- value when numeric
param_value_t VARCHAR2(2000)); -- value when text
/
-- Collection of job parameters; referenced by the ku$_JobDesc types below.
-- Fix: SQL*Plus treats CREATE TYPE like PL/SQL and needs a "/" to execute;
-- the terminator was missing after this statement.
CREATE TYPE sys.ku$_ParamValues1010 AS TABLE OF sys.ku$_ParamValue1010;
/
-- 10.2 job description as returned by GET_STATUS: static attributes of the
-- job (names, mode, versions, platforms, start times) plus its parameter
-- list.  NOTE(review): VARCHAR2(30) identifier fields predate 12.2 long names.
CREATE TYPE sys.ku$_JobDesc1020 IS OBJECT (
job_name VARCHAR2(30), -- The job name
guid RAW(16), -- The job GUID
operation VARCHAR2(30), -- Current operation
job_mode VARCHAR2(30), -- Current mode
remote_link VARCHAR2(4000), -- DB link, if any
owner VARCHAR2(30), -- Job owner
platform VARCHAR2(101), -- Current job platform
exp_platform VARCHAR2(101), -- Export platform
global_name VARCHAR2(4000), -- Current global name
exp_global_name VARCHAR2(4000), -- Export global name
instance VARCHAR2(16), -- The instance name
db_version VARCHAR2(30), -- Version of objects
exp_db_version VARCHAR2(30), -- Export version
scn NUMBER, -- Job SCN
creator_privs VARCHAR2(30), -- Privs of job
start_time DATE, -- This job start time
exp_start_time DATE, -- Export start time
term_reason NUMBER, -- Job termination code
max_degree NUMBER, -- Max. parallelism
log_file VARCHAR2(4000), -- Log file name
sql_file VARCHAR2(4000), -- SQL file name
params ku$_ParamValues1010) -- Parameter list
/
-- 12.1 job description: extends the 1020 shape with negotiated job version,
-- timezone/charset information and platform endianness for both the current
-- and the exporting server.
CREATE TYPE sys.ku$_JobDesc1210 IS OBJECT (
job_name VARCHAR2(30), -- The job name
guid RAW(16), -- The job GUID
operation VARCHAR2(30), -- Current operation
job_mode VARCHAR2(30), -- Current mode
remote_link VARCHAR2(4000), -- DB link, if any
owner VARCHAR2(30), -- Job owner
platform VARCHAR2(101), -- Current job platform
exp_platform VARCHAR2(101), -- Export platform
global_name VARCHAR2(4000), -- Current global name
exp_global_name VARCHAR2(4000), -- Export global name
instance VARCHAR2(16), -- The instance name
db_version VARCHAR2(30), -- Cur. server software version
exp_db_version VARCHAR2(30), -- Export svr. software version
job_version VARCHAR2(30), -- Negotiated data version
scn NUMBER, -- Job SCN
creator_privs VARCHAR2(30), -- Privs of job
start_time DATE, -- This job start time
exp_start_time DATE, -- Export start time
term_reason NUMBER, -- Job termination code
max_degree NUMBER, -- Max. parallelism
timezone VARCHAR2(64), -- Cur. server timezone
exp_timezone VARCHAR2(64), -- Exp. server timezone
tstz_version NUMBER, -- Cur. server timezone version
exp_tstz_version NUMBER, -- Exp. server timezone
endianness VARCHAR2(16), -- Cur. platform's endianness
exp_endianness VARCHAR2(16), -- Exp. platform's endianness ('BIG' or 'LITTLE')
charset VARCHAR2(28), -- Cur. server charset
exp_charset VARCHAR2(28), -- Exp. server charset
ncharset VARCHAR2(28), -- Cur. server national charset
exp_ncharset VARCHAR2(28), -- Exp. server national charset
log_file VARCHAR2(4000), -- Log file name
sql_file VARCHAR2(4000), -- SQL file name
params ku$_ParamValues1010) -- Parameter list
/
DumpFile Types
-- One dump file belonging to a job's dump file set.
CREATE TYPE sys.ku$_DumpFile1010 AS OBJECT (
file_name VARCHAR2(4000), -- file name
file_type NUMBER, -- presumably one of the KU$_FILE_TYPE_* constants above — TODO confirm
file_size NUMBER, -- file size
file_bytes_written NUMBER); -- bytes written so far
/
-- Collection of dump files; embedded in the ku$_JobStatus types.
CREATE TYPE sys.ku$_DumpFileSet1010 AS TABLE OF sys.ku$_DumpFile1010;
/
-- One (item_code, value) pair of dump file header information, as returned
-- by GET_DUMPFILE_INFO; item_code values are the KU$_DFHDR_* constants.
CREATE TYPE sys.ku$_dumpfile_item IS OBJECT (
item_code NUMBER, -- Identifies header item
value VARCHAR2(2048)); -- Text string value
/
Executable-Package Equivalencies
The examples at right were written by Richard Harrison, who invested a substantial effort in putting them together, and are republished here with his permission.
The original post can be found here.
Executable
Package
ACCESS_METHOD
dbms_datapump.set_parameter(handle => h1, name => 'DATA_ACCESS_METHOD', value => 'AUTOMATIC');
dbms_datapump.set_parameter(handle => h1, name => 'DATA_ACCESS_METHOD', value => 'DIRECT_PATH');
dbms_datapump.set_parameter(handle => h1, name => 'DATA_ACCESS_METHOD', value => 'EXTERNAL_TABLE');
dbms_datapump.set_parameter(handle => l_dp_handle, name => 'COMPRESSION', value => 'ALL');
dbms_datapump.set_parameter(handle => l_dp_handle, name => 'COMPRESSION', value => 'DATA_ONLY');
dbms_datapump.set_parameter(handle => l_dp_handle, name => 'COMPRESSION', value => 'METADATA_ONLY');
CONTENT
dbms_datapump.set_parameter(handle => h1, name => 'INCLUDE_METADATA', value => 1); ALL
dbms_datapump.set_parameter(handle => h1, name => 'INCLUDE_METADATA', value => 0); DATA_ONLY
dbms_datapump.data_filter(handle => h1, name => 'INCLUDE_ROWS', value => 0); METADATA_ONLY
DATA_OPTIONS
dbms_datapump.set_parameter(handle => l_dp_handle, name => 'DATA_OPTIONS', value => dbms_datapump.KU$_DATAOPT_XMLTYPE_CLOB);
dbms_datapump.set_parameter(handle => h1, name => 'ENCRYPTION', value => 'ALL');
dbms_datapump.set_parameter(handle => h1, name => 'ENCRYPTION', value => 'DATA_ONLY');
dbms_datapump.set_parameter(handle => h1, name => 'ENCRYPTION', value => 'ENCRYPTED_COLUMNS_ONLY');
dbms_datapump.set_parameter(handle => h1, name => 'ENCRYPTION', value => 'METADATA_ONLY');
dbms_datapump.set_parameter(handle => h1, name => 'ENCRYPTION', value => 'NONE');
ENCRYPTION_ALGORITHM
dbms_datapump.set_parameter(handle => h1, name => 'ENCRYPTION_ALGORITHM', value => 'AES128');
dbms_datapump.set_parameter(handle => h1, name => 'ENCRYPTION_ALGORITHM', value => 'AES192');
dbms_datapump.set_parameter(handle => h1, name => 'ENCRYPTION_ALGORITHM', value => 'AES256');
ENCRYPTION_MODE
dbms_datapump.set_parameter(handle => h1, name => 'ENCRYPTION_MODE', value => 'DUAL');
dbms_datapump.set_parameter(handle => h1, name => 'ENCRYPTION_MODE', value => 'PASSWORD');
dbms_datapump.set_parameter(handle => h1, name => 'ENCRYPTION_MODE', value => 'TRANSPARENT');
ENCRYPTION_PASSWORD
dbms_datapump.set_parameter(handle => h1, name => 'ENCRYPTION_PASSWORD', value => 'secret password');
ESTIMATE
dbms_datapump.set_parameter(handle => h1, name => 'ESTIMATE', value => 'BLOCKS');
ESTIMATE_ONLY
dbms_datapump.set_parameter(handle => h1, name => 'ESTIMATE_ONLY', value => 1);
dbms_datapump.set_parameter(handle => l_dp_handle, name => 'KEEP_MASTER', value => 1);
dbms_datapump.set_parameter(handle => l_dp_handle, name => 'KEEP_MASTER', value => 0);
-- METRICS is a job parameter, so it is set with SET_PARAMETER (as every other
-- example in this table does), not with METADATA_FILTER; "HANDLR" was a typo
-- for the handle variable used throughout these examples.
dbms_datapump.set_parameter(handle => h1, name => 'METRICS', value => 0);
dbms_datapump.set_parameter(handle => h1, name => 'METRICS', value => 1);
dbms_datapump.set_parameter(handle => h1, name => 'TRANSPORTABLE', value => 'ALWAYS');
dbms_datapump.set_parameter(handle => h1, name => 'TRANSPORTABLE', value => 'NEVER');
TRANSPORT_FULL_CHECK
dbms_datapump.set_parameter(handle => h1, name => 'TTS_FULL_CHECK', value => 0); N
dbms_datapump.set_parameter(handle => h1, name => 'TTS_FULL_CHECK', value => 1); Y
dbms_datapump.set_parameter(handle => h1, name => 'USER_METADATA', value => 1); export create user statement in schema mode (only if datapump_exp_full_database role is granted)
dbms_datapump.set_parameter(handle => h1, name => 'USER_METADATA', value => 0); don't export create user statement in schema mode
-- 10.2 dynamic job status as returned by GET_STATUS: progress counters,
-- current state, per-worker status and the dump file set.
CREATE TYPE sys.ku$_JobStatus1020 IS OBJECT (
job_name VARCHAR2(30), -- Name of the job
operation VARCHAR2(30), -- Current operation
job_mode VARCHAR2(30), -- Current mode
bytes_processed NUMBER, -- Bytes so far
total_bytes NUMBER, -- Total bytes for job
percent_done NUMBER, -- Percent done
degree NUMBER, -- Of job parallelism
error_count NUMBER, -- #errors so far
state VARCHAR2(30), -- Current job state
phase NUMBER, -- Job phase
restart_count NUMBER, -- #Job restarts
worker_status_list ku$_WorkerStatusList1020, -- For (non-idle) job worker processes
files ku$_DumpFileSet1010); -- Dump file info
/
-- 11.2 dynamic job status: identical shape to the 1020 version except the
-- worker list is the 1120 variant (which adds instance_id per worker).
CREATE TYPE sys.ku$_JobStatus1120 IS OBJECT (
job_name VARCHAR2(30), -- Name of the job
operation VARCHAR2(30), -- Current operation
job_mode VARCHAR2(30), -- Current mode
bytes_processed NUMBER, -- Bytes so far
total_bytes NUMBER, -- Total bytes for job
percent_done NUMBER, -- Percent done
degree NUMBER, -- Of job parallelism
error_count NUMBER, -- #errors so far
state VARCHAR2(30), -- Current job state
phase NUMBER, -- Job phase
restart_count NUMBER, -- #Job restarts
worker_status_list ku$_WorkerStatusList1120, -- For (non-idle) job worker processes
files ku$_DumpFileSet1010); -- Dump file info
/
-- 12.1 dynamic job status, using the 1210 worker list.
-- Fix: in the published page the worker_status_list attribute had been fused
-- into the trailing comment of restart_count, making the type invalid; the
-- attribute is restored on its own line here.
CREATE TYPE sys.ku$_JobStatus1210 IS OBJECT
(
job_name VARCHAR2(30), -- Name of the job
operation VARCHAR2(30), -- Current operation
job_mode VARCHAR2(30), -- Current mode
bytes_processed NUMBER, -- Bytes so far
total_bytes NUMBER, -- Total bytes for job
percent_done NUMBER, -- Percent done
degree NUMBER, -- Of job parallelism
error_count NUMBER, -- #errors so far
state VARCHAR2(30), -- Current job state
phase NUMBER, -- Job phase
restart_count NUMBER, -- #Job restarts
worker_status_list ku$_WorkerStatusList1210, -- For (non-idle) job worker processes
files ku$_DumpFileSet1010) -- Dump file info
/
-- 11.2 composite status returned by GET_STATUS; mask says which members are
-- populated (KU$_STATUS_* bits).  NOTE(review): ku$_Status1120 also appears
-- with an identical definition later on this page — duplicated in the source.
CREATE TYPE sys.ku$_Status1120 IS OBJECT (
mask NUMBER, -- Status types present
wip ku$_LogEntry1010, -- Work in progress
job_description ku$_JobDesc1020, -- Complete job description
job_status ku$_JobStatus1120, -- Detailed job status
error ku$_LogEntry1010) -- Multi-level context errors
/
-- 12.1 composite status: uses the 1210 job description and job status types.
CREATE TYPE sys.ku$_Status1210 IS OBJECT (
mask NUMBER, -- Status types present
wip ku$_LogEntry1010, -- Work in progress
job_description ku$_JobDesc1210, -- Complete job description
job_status ku$_JobStatus1210, -- Detailed job status
error ku$_LogEntry1010) -- Multi-level context errors
/
Log Entry & Error Types
-- One log/error line produced by a job.
CREATE TYPE sys.ku$_LogLine1010 AS OBJECT (
logLineNumber NUMBER, -- ordinal of the line
errorNumber NUMBER, -- Oracle error number, if any
LogText VARCHAR2(2000) ); -- message text
/
-- Public synonym so non-SYS callers can reference the type.
CREATE PUBLIC SYNONYM ku$_LogLine1010
FOR sys.ku$_LogLine1010;
-- Collection of log lines; used for the wip and error members of ku$_Status.
CREATE TYPE sys.ku$_LogEntry1010 AS TABLE OF
sys.ku$_LogLine1010;
/
CREATE PUBLIC SYNONYM ku$_LogEntry1010
FOR sys.ku$_LogEntry1010;
Status Types
-- 10.1 composite status.  NOTE(review): references ku$_JobDesc1010 and
-- ku$_JobStatus1010, which are not shown on this page — presumably defined
-- alongside the other 1010 types; verify against the database.
CREATE TYPE sys.ku$_status1010 AS OBJECT (
mask NUMBER, -- which members are populated (KU$_STATUS_* bits)
wip ku$_LogEntry1010, -- work-in-progress messages
job_description ku$_JobDesc1010, -- static job attributes
job_status ku$_JobStatus1010, -- dynamic job status
error ku$_LogEntry1010); -- error messages
/
-- 10.2 composite status.
CREATE TYPE sys.ku$_Status1020 IS OBJECT (
mask NUMBER, -- Status types present
wip ku$_LogEntry1010, -- Work in progress
job_description ku$_JobDesc1020, -- Complete job description
job_status ku$_JobStatus1020, -- Detailed job status
error ku$_LogEntry1010) -- Multi-level context errors
/
-- 11.2 composite status.  NOTE(review): duplicate of the ku$_Status1120
-- definition that appears earlier on this page.
CREATE TYPE sys.ku$_Status1120 IS OBJECT (
mask NUMBER, -- Status types present
wip ku$_LogEntry1010, -- Work in progress
job_description ku$_JobDesc1020, -- Complete job description
job_status ku$_JobStatus1120, -- Detailed job status
error ku$_LogEntry1010) -- Multi-level context errors
/
Worker Status Types
-- 10.1 per-worker status: what each worker process is doing and how far
-- along it is on its current object.
CREATE TYPE sys.ku$_WorkerStatus1010 AS OBJECT (
worker_number NUMBER, -- worker process identifier
process_name VARCHAR2(30), -- worker process name
state VARCHAR2(30), -- worker process state
schema VARCHAR2(30), -- schema of current object
name VARCHAR2(4000), -- current object name
object_type VARCHAR2(200), -- current object type
partition VARCHAR2(30), -- partition name, if any
completed_objects NUMBER, -- objects completed
total_objects NUMBER, -- total objects assigned
completed_rows NUMBER, -- rows completed
completed_bytes NUMBER, -- bytes completed
percent_done NUMBER) -- percent done, current object
/
CREATE TYPE sys.ku$_WorkerStatusList1010 AS TABLE OF sys.ku$_WorkerStatus1010
/
-- 10.2 per-worker status: adds degree of parallelism.
CREATE TYPE sys.ku$_WorkerStatus1020 AS OBJECT (
worker_number NUMBER, -- Worker process identifier
process_name VARCHAR2(30), -- Worker process name
state VARCHAR2(30), -- Worker process state
schema VARCHAR2(30), -- Schema name
name VARCHAR2(4000), -- Object name
object_type VARCHAR2(200), -- Object type
partition VARCHAR2(30), -- Partition name
completed_objects NUMBER, -- Completed number of objects
total_objects NUMBER, -- Total number of objects
completed_rows NUMBER, -- Number of rows completed
completed_bytes NUMBER, -- Number of bytes completed
percent_done NUMBER, -- Percent done current object
degree NUMBER); -- Degree of parallelism
/
-- 11.2 per-worker status: adds the RAC instance id the worker runs on.
CREATE TYPE sys.ku$_WorkerStatus1120 AS OBJECT (
worker_number NUMBER, -- Worker process identifier
process_name VARCHAR2(30), -- Worker process name
state VARCHAR2(30), -- Worker process state
schema VARCHAR2(30), -- Schema name
name VARCHAR2(4000), -- Object name
object_type VARCHAR2(200), -- Object type
partition VARCHAR2(30), -- Partition name
completed_objects NUMBER, -- Completed number of objects
total_objects NUMBER, -- Total number of objects
completed_rows NUMBER, -- Number of rows completed
completed_bytes NUMBER, -- Number of bytes completed
percent_done NUMBER, -- Percent done current object
degree NUMBER, -- Degree of parallelism
instance_id NUMBER); -- Instance ID where running
/
-- 12.1 per-worker status: adds instance name and host name where the worker
-- runs.  Fix: the "/" terminator after this type body was missing, which
-- would cause SQL*Plus to append the next CREATE statement to this one.
CREATE TYPE sys.ku$_WorkerStatus1210 AS OBJECT
(
worker_number NUMBER, -- Worker process identifier
process_name VARCHAR2(30), -- Worker process name
state VARCHAR2(30), -- Worker process state
schema VARCHAR2(30), -- Schema name
name VARCHAR2(4000), -- Object name
object_type VARCHAR2(200), -- Object type
partition VARCHAR2(30), -- Partition name
completed_objects NUMBER, -- Completed number of objects
total_objects NUMBER, -- Total number of objects
completed_rows NUMBER, -- Number of rows completed
completed_bytes NUMBER, -- Number of bytes completed
percent_done NUMBER, -- Percent done current object
degree NUMBER, -- Degree of parallelism
instance_id NUMBER, -- Instance ID where running
instance_name VARCHAR2(60), -- Instance Name where running
host_name VARCHAR2(64)); -- Host name where running
/
-- Worker status collections, one per type version.
-- NOTE(review): ku$_WorkerStatusList1010 is also created earlier on this page.
CREATE TYPE sys.ku$_WorkerStatusList1010 AS TABLE OF sys.ku$_WorkerStatus1010
/
CREATE TYPE sys.ku$_WorkerStatusList1020 AS TABLE OF sys.ku$_WorkerStatus1020
/
CREATE TYPE sys.ku$_WorkerStatusList1120 AS TABLE OF sys.ku$_WorkerStatus1120
/
CREATE TYPE sys.ku$_WorkerStatusList1210 AS TABLE OF sys.ku$_WorkerStatus1210
/
ORA-39001
INVALID_ARGVAL: An invalid value was supplied for an input parameter
ORA-39002
INVALID_OPERATION: Cannot be executed because of inconsistencies between the API and the job
ORA-39004
INVALID_STATE: The state of the job precludes the execution of the API
ORA-39005
INCONSISTENT_ARGS: Inconsistent arguments
ORA-39006
INTERNAL_ERROR: Internal datapump exception
ORA-31623
INVALID_HANDLE: Incorrect handle specified for the job
ORA-31626
NO_SUCH_JOB: An invalid reference to a job which is no longer executing
ORA-31627
SUCCESS_WITH_INFO: User specified job parameters that yielded informational messages
ORA-31631
PRIVILEGE_ERROR: The necessary privileges are not available for operations
ORA-31634
JOB_EXISTS: Job creation or restart failed due to duplicate name
ORA-39211
NO_DUMPFILE_INFO: User specified an invalid or inaccessible file
ORA-39327
WARNING_DV_NOENCRYPT: Database Vault data is being stored unencrypted in dump file set
Filters
Name
Object Type
Meaning
INCLUDE_NAME_EXPR
and
EXCLUDE_NAME_EXPR
Named objects
Defines which object names are included in (INCLUDE_NAME_EXPR), or excluded from (EXCLUDE_NAME_EXPR), the job. You use the object_type parameter to limit the filter to a particular object type.
For Table mode, identifies which tables are to be processed.
SCHEMA_EXPR
Schema objects
Restricts the job to objects whose owning schema name is satisfied by the expression.
For Table mode, only a single SCHEMA_EXPR filter is supported. If specified, it must only specify a single schema (for example, 'IN (''SCOTT'')').
For Schema mode, identifies which users are to be processed.
TABLESPACE_EXPR
Table, Cluster, Index, Rollback Segment
Restricts the job to objects stored in a tablespace whose name is satisfied by the expression.
For Tablespace mode, identifies which tablespaces are to be processed. If a partition of an object is stored in the tablespace, the entire object is added to the job.
For Transportable mode, identifies which tablespaces are to be processed. If a table has a single partition in the tablespace set, all partitions must be in the tablespace set.
An index is not included within the tablespace set unless all of its partitions are in the tablespace set. A domain index is not included in the tablespace set unless all of its secondary objects are included in the tablespace set.
INCLUDE_PATH_EXPR and EXCLUDE_PATH_EXPR
All
Defines which object paths are included in, or excluded from, the job. You use these filters to select only certain object types from the database or dump file set.
Objects of paths satisfying the condition are included (INCLUDE_PATH_EXPR) or excluded (EXCLUDE_PATH_EXPR) from the operation. The object_path parameter is not supported for these filters.
First Available
10.1
Remaps
Name
Data Type
Object Type
Meaning
REMAP_DATAFILE
Text
Library, Tablespace, Directory
Any datafile reference in the job that matches the object_type parameter and referenced the old_value datafile will be redefined to use the value datafile.
REMAP_SCHEMA
Text
Schema Objects
Any schema object in the job that matches the object_type parameter and was located in the old_value schema will be moved to the value schema.
Any storage segment in the job that matches the object_type parameter and was located in the old_value tablespace will be relocated to the value tablespace.
Adds a sequential device to the dump file set for Export, Import, or Sql_file operations
-- Adds a sequential device (e.g. tape) to the job's dump file set.
dbms_datapump.add_device(
handle IN NUMBER, -- job handle
device_name IN VARCHAR2, -- name of device being added
volume_size IN VARCHAR2 DEFAULT NULL); -- device storage capacity
Adds files to the dump file set for an Export, Import, or SQL_FILE operation or specifies the log file or the output file for a SQL_FILE operation
-- Adds a dump, log, or SQL file to the job; filetype selects which via the
-- KU$_FILE_TYPE_* constants (default: dump file).
dbms_datapump.add_file(
handle IN NUMBER, -- job handle from OPEN
filename IN VARCHAR2, -- file name, may include %U-style substitution
directory IN VARCHAR2 DEFAULT NULL, -- directory object name
filesize IN VARCHAR2 DEFAULT NULL, -- maximum file size
filetype IN NUMBER DEFAULT dbms_datapump.ku$_file_type_dump_file,
reusefile IN NUMBER DEFAULT NULL); -- nonzero = overwrite existing file — TODO confirm
-- Creates a view over a job identified by schema and name.
-- NOTE(review): the KU$_JOB_VIEW_* constants are declared NUMBER above, so
-- view_type being VARCHAR2 here may be a transcription error — verify against
-- the package specification.
dbms_datapump.create_job_view(
job_schema IN VARCHAR2, -- schema owning the job
job_name IN VARCHAR2, -- job name
view_name IN VARCHAR2, -- name of the view to create
view_type IN VARCHAR2 DEFAULT dbms_datapump.ku$_job_view_all);
TBD
Overload 2
-- Overload taking an open job handle instead of schema/name.
dbms_datapump.create_job_view(
handle IN NUMBER, -- job handle from OPEN/ATTACH
view_name IN VARCHAR2, -- name of the view to create
view_type IN VARCHAR2 DEFAULT dbms_datapump.ku$_job_view_all);
Specifies restrictions on the rows that are to be retrieved
Overload 1
-- Row filter with a numeric value (e.g. INCLUDE_ROWS 0/1 as in the examples
-- above).
dbms_datapump.data_filter (
handle IN NUMBER, -- job handle
name IN VARCHAR2, -- filter name
value IN NUMBER, -- numeric filter value
table_name IN VARCHAR2 DEFAULT NULL, -- if not specified = all
schema_name IN VARCHAR2 DEFAULT NULL); -- if not specified = all
TBD
Overload 2
-- Row filter overload taking a CLOB value, for long filter expressions.
dbms_datapump.data_filter(
handle IN NUMBER, -- job handle
name IN VARCHAR2, -- filter name
value IN CLOB, -- filter value as CLOB
table_name IN VARCHAR2 DEFAULT NULL, -- if not specified = all
schema_name IN VARCHAR2 DEFAULT NULL); -- if not specified = all
TBD
Overload 3
-- Row filter overload taking a text value (e.g. a SUBQUERY predicate).
dbms_datapump.data_filter(
handle IN NUMBER, -- job handle
name IN VARCHAR2, -- filter name
value IN VARCHAR2, -- filter value as text
table_name IN VARCHAR2 DEFAULT NULL, -- if not specified = all
schema_name IN VARCHAR2 DEFAULT NULL); -- if not specified = all
-- Applies a remapping function to a column's data as rows are processed.
dbms_datapump.data_remap(
handle IN NUMBER, -- job handle
name IN VARCHAR2, -- remap name
table_name IN VARCHAR2, -- table containing the column
column IN VARCHAR2, -- column to remap
function IN VARCHAR2, -- remapping function to apply
schema IN VARCHAR2 DEFAULT NULL, -- schema of the table
remap_flags IN NUMBER DEFAULT 0); -- alt. value dbms_datapump.ku$_data_remap_with_rowid
Returns information about an object from the master table
-- Returns information about an object from the master table.  NOTE(review):
-- per the failing example below (ORA-31623), this must be called from a
-- session attached to a running job.
dbms_datapump.get_object_info(
object_owner IN VARCHAR2, -- owner of the object
object_name IN VARCHAR2, -- object name
object_type IN VARCHAR2, -- object type, e.g. 'TABLE'
processing_status OUT VARCHAR2, -- processing status of the object
processing_state OUT VARCHAR2); -- processing state of the object
SQL> DECLARE
2 pstatus VARCHAR2(30);
3 pstate VARCHAR2(30);
4 BEGIN
5 dbms_datapump.get_object_info('UWCLASS', 'SERVERS', 'TABLE', pstatus, pstate);
6 dbms_output.put_line(pstatus);
7 dbms_output.put_line(pstate);
8 END;
9 /
DECLARE
*
ERROR at line 1:
ORA-31623: a job is not attached to this session via the specified handle
ORA-06512: at "SYS.KUPV$FT_INT", line 1872
ORA-06512: at "SYS.DBMS_SYS_ERROR", line 79
ORA-06512: at "SYS.KUPV$FT_INT", line 1805
ORA-06512: at "SYS.DBMS_DATAPUMP", line 2266
ORA-06512: at "SYS.DBMS_DATAPUMP", line 7526
ORA-06512: at line 5
-- Writes a message to the job's log file and/or status stream.
dbms_datapump.log_error(
handle IN NUMBER, -- job handle
message IN VARCHAR2, -- message text
error_number IN NUMBER DEFAULT 0, -- error number to report
fatal_error IN NUMBER DEFAULT 0, -- nonzero = treat as fatal — TODO confirm
log_file_only IN NUMBER DEFAULT 0); -- nonzero = log file only, not status — TODO confirm
Creates a filter that restricts the items that are included in a job
Overload 1
-- Restricts which objects are included in the job; filter names and values
-- are listed in the table below.
dbms_datapump.metadata_filter (
handle IN NUMBER, -- job handle
name IN VARCHAR2, -- filter name, e.g. 'SCHEMA_EXPR'
value IN VARCHAR2, -- filter value as text
object_path IN VARCHAR2 DEFAULT NULL, -- restrict filter to an object path
object_type IN VARCHAR2 DEFAULT NULL); -- restrict filter to an object type
Transform names and values (valid for both overloads)
Filter Name
Transform Values
Data Type
Applicable Object Types
NAME_EXPR
A single named object
TEXT
All
NAME_LIST
A comma-separated list of objects
TEXT
All
SCHEMA_EXPR
A single named schema
TEXT
Schema
SCHEMA_LIST
A comma-separated list of schemas
TEXT
Schemas
TABLESPACE_EXPR
A single named tablespace
TEXT
Table, Cluster, Index
TABLESPACE_LIST
A comma-separated list of tablespaces
TEXT
Table, Cluster, Index
INCLUDE_PATH_EXPR
A single object path
TEXT
All
INCLUDE_PATH_LIST
A comma-separated list of object paths
TEXT
All
EXCLUDE_PATH_EXPR
TEXT
All
EXCLUDE_PATH_LIST
A comma-separated list of
TEXT
All
EXCLUDE_TABLES
A comma-separated list of tables to be excluded (not exported)
TEXT
Table Export
TABLES_AS_VIEWS
A comma-separated list of views to be exported as tables
TEXT
Table Export
See Export Demo Below
Overload 2
-- Metadata filter overload taking a CLOB value, for long filter expressions.
dbms_datapump.metadata_filter (
handle IN NUMBER, -- job handle
name IN VARCHAR2, -- filter name
value IN CLOB, -- filter value as CLOB
object_path IN VARCHAR2 DEFAULT NULL, -- restrict filter to an object path
object_type IN VARCHAR2 DEFAULT NULL); -- restrict filter to an object type
Specifies transformations to be applied to objects as they are processed in the specified job
Overload 1
-- Specifies a transformation (text-valued) applied to objects as they are
-- processed; transform names and values are listed in the table below.
dbms_datapump.metadata_transform (
handle IN NUMBER, -- job handle
name IN VARCHAR2, -- transform name, e.g. 'LOB_STORAGE'
value IN VARCHAR2, -- transform value as text
object_type IN VARCHAR2 DEFAULT NULL); -- restrict transform to an object type
Transform names and values (valid for both overloads)
Transform Name
Transform Values
Data Type
Applicable Object Types
DISABLE_ARCHIVE_LOGGING
N or Y: The default is N
TEXT
TABLE, INDEX
LOB_STORAGE
BASICFILE, DEFAULT, NO_CHANGE, SECUREFILE
TEXT
TABLE
OID
0 or 1: The default is 1
NUMBER
TYPE TABLE
PCTSPACE
Default is 100
NUMBER
TABLE, INDEX, TABLESPACE
SEGMENT_ATTRIBUTES
If nonzero, include segment attributes (physical attributes, storage attributes, tablespaces, logging): default is 1
NUMBER
TABLE, INDEX
SEGMENT_CREATION
If non-zero will include the segment creation clause: Default is 1
NUMBER
TABLE
STORAGE
If nonzero, include the storage clause: default is 1
NUMBER
TABLE
TABLE_COMPRESSION_CLAUSE
Specify NONE to omit the compression clause
TEXT
TABLE
The list of object types can be found in the DATABASE_EXPORT_OBJECTS, SCHEMA_EXPORT_OBJECTS, TABLE_EXPORT_OBJECTS, and the TABLESPACE_EXPORT_OBJECTS views.
TBD
Overload 2
-- Metadata transform overload for numeric-valued transforms (e.g. PCTSPACE,
-- STORAGE, SEGMENT_ATTRIBUTES).
dbms_datapump.metadata_transform (
handle IN NUMBER, -- job handle
name IN VARCHAR2, -- transform name
value IN NUMBER, -- transform value as number
object_type IN VARCHAR2 DEFAULT NULL); -- restrict transform to an object type
The list of object types can be found in the DATABASE_EXPORT_OBJECTS, SCHEMA_EXPORT_OBJECTS, TABLE_EXPORT_OBJECTS, and the TABLESPACE_EXPORT_OBJECTS views.
Declare a new job using the Data Pump API. The handle that is returned is used as a parameter for calls to all other procedures except ATTACH.
-- Declares a new job and returns its handle, which is passed to every other
-- procedure except ATTACH.  Valid job_mode values are listed in the table
-- below; compression defaults to metadata-only.
dbms_datapump.open (
operation IN VARCHAR2, -- 'EXPORT', 'IMPORT' or 'SQL_FILE'
job_mode IN VARCHAR2, -- e.g. 'FULL', 'SCHEMA', 'TABLE'
remote_link IN VARCHAR2 DEFAULT NULL, -- DB link for network jobs
job_name IN VARCHAR2 DEFAULT NULL, -- job name; generated if NULL
version IN VARCHAR2 DEFAULT 'COMPATIBLE', -- data version to produce/accept
compression IN NUMBER DEFAULT dbms_datapump.ku$_compress_metadata)
RETURN NUMBER; -- the job handle
Job Mode
Description
FULL
Full database excluding the SYS, XDB, ORDSYS, MDSYS, CTXSYS, ORDPLUGINS, and LBACSYS schemas
SCHEMA
Operates on a set of selected schemas. Defaults to current user. All objects in the selected schemas are processed. Users cannot specify schemas noted above
TABLE
Operates on a set of selected tables. Defaults to all of the tables in the current user's schema. Only tables and their dependent objects are processed.
TABLESPACE
Operates on a set of selected tablespaces. No defaulting is performed. Tables that have storage in the specified tablespaces are processed in the same manner as in Table mode.
TRANSPORTABLE
Operates on metadata for tables (and their dependent objects) within a set of selected tablespaces to perform a transportable tablespace export/import.
-- NOTE(review): appears to be an internal entry point used to coordinate the
-- remote side of network (DB-link) jobs — not documented for direct use;
-- confirm before calling.
dbms_datapump.setup_remote_context(
user_name IN VARCHAR2, -- user owning the remote context
job_name IN VARCHAR2, -- job name
version IN NUMBER, -- protocol/data version
status_xml IN VARCHAR2, -- status document as XML text
status_xml_len IN NUMBER, -- length of status_xml
more IN NUMBER); -- nonzero = more XML to follow — TODO confirm
-- Begins or resumes execution of the job attached to the handle.
dbms_datapump.start_job (
handle IN NUMBER, -- job handle
skip_current IN NUMBER DEFAULT 0, -- on restart, skip the failing action — TODO confirm
abort_step IN NUMBER DEFAULT 0, -- internal/testing use — TODO confirm
cluster_ok IN NUMBER DEFAULT 1, -- nonzero = workers may run on other RAC instances
service_name IN VARCHAR2 DEFAULT NULL); -- restrict workers to a service
-- Demo setup: inspect existing directory objects, create the OS directories,
-- map them to directory objects, grant access, and clean out old files.
set linesize 121
col owner format a20
col directory_name format a15
col directory_path format a60
SELECT *
FROM dba_directories;
HOST mkdir c:\expimp
HOST mkdir c:\expimp\logs
EXIT
CREATE OR REPLACE DIRECTORY expimp AS 'c:\expimp';
CREATE OR REPLACE DIRECTORY expimp_log AS 'c:\expimp\logs';
GRANT READ,WRITE ON DIRECTORY expimp TO system;
GRANT READ,WRITE ON DIRECTORY expimp_log TO system;
HOST del c:\expimp\*.dmp
-- Fix: the log directory created and mapped above is c:\expimp\logs, not
-- c:\expimp_logs, so the cleanup must target the same path.
HOST del c:\expimp\logs\*.log
set serveroutput on
-- Schema-mode export with progress monitoring via GET_STATUS.
-- Fix: the published listing had lost the statements between OPEN and the
-- status-handling code (ADD_FILE, METADATA_FILTER, START_JOB and the WHILE
-- loop), leaving an END LOOP with no matching LOOP and js/sts used before
-- being set.  The monitoring loop is restored per Oracle's documented
-- Data Pump API example; adjust file name, directory and schema as needed.
DECLARE
ind NUMBER; -- loop index
dph NUMBER; -- job handle
pct_done NUMBER; -- percentage complete
job_state VARCHAR2(30); -- track job state
le ku$_LogEntry; -- WIP and error messages
js ku$_JobStatus; -- job status from get_status
jd ku$_JobDesc; -- job description from get_status
sts ku$_Status; -- status object returned by get_status
BEGIN
-- create job
dph := dbms_datapump.open('EXPORT','SCHEMA',NULL,'EXAMPLE5','LATEST');
-- add a dump file and restrict the job to a single schema
dbms_datapump.add_file(dph, 'example5.dmp', 'EXPIMP');
dbms_datapump.metadata_filter(dph, 'SCHEMA_EXPR', 'IN (''SCOTT'')');
-- start the job and monitor it until it completes or is stopped
dbms_datapump.start_job(dph);
pct_done := 0;
job_state := 'UNDEFINED';
WHILE (job_state != 'COMPLETED') AND (job_state != 'STOPPED') LOOP
dbms_datapump.get_status(dph,
dbms_datapump.ku$_status_job_error +
dbms_datapump.ku$_status_job_status +
dbms_datapump.ku$_status_wip, -1, job_state, sts);
js := sts.job_status;
-- if the percentage done changed, display the new value
IF js.percent_done != pct_done THEN
dbms_output.put_line('*** Job percent done = ' || TO_CHAR(js.percent_done));
pct_done := js.percent_done;
END IF;
-- display any work-in-progress (WIP) or error messages received from the job
IF (BITAND(sts.mask, dbms_datapump.ku$_status_wip) != 0) THEN
le := sts.wip;
ELSE
IF (BITAND(sts.mask, dbms_datapump.ku$_status_job_error) != 0) THEN
le := sts.error;
ELSE
le := NULL;
END IF;
END IF;
IF le IS NOT NULL THEN
ind := le.FIRST;
WHILE ind IS NOT NULL LOOP
dbms_output.put_line(le(ind).LogText);
ind := le.NEXT(ind);
END LOOP;
END IF;
END LOOP;
-- indicate that the job finished and detach from it
dbms_output.put_line('Job has completed');
dbms_output.put_line('Final job state = ' || job_state);
dbms_datapump.detach(dph);
EXCEPTION
WHEN OTHERS THEN
dbms_datapump.stop_job(dph);
END;
/