Purpose
DBMS_PARALLEL_EXECUTE provides APIs to chunk a table into smaller units of work and to execute those chunks in parallel.
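The typical flow is: create a task, chunk the target table, run the DML against the chunks, and drop the task when done. A minimal sketch of that flow, assuming the HR sample schema is installed and the task name DEMOTASK is unused (both assumptions), looks like this:
DECLARE
 l_sql_stmt VARCHAR2(1000);
BEGIN
  -- 1. create the task
  dbms_parallel_execute.create_task('DEMOTASK');
  -- 2. chunk HR.EMPLOYEES into ranges of 50 EMPLOYEE_ID values
  dbms_parallel_execute.create_chunks_by_number_col('DEMOTASK', 'HR', 'EMPLOYEES', 'EMPLOYEE_ID', 50);
  -- 3. run the (deliberately no-op) DML against every chunk, two scheduler jobs at a time
  l_sql_stmt := 'UPDATE hr.employees SET salary = salary
                 WHERE employee_id BETWEEN :start_id AND :end_id';
  dbms_parallel_execute.run_task('DEMOTASK', l_sql_stmt, dbms_sql.native, parallel_level => 2);
  -- 4. clean up
  dbms_parallel_execute.drop_task('DEMOTASK');
END;
/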
Chunks the table associated with the task by the specified numeric column
dbms_parallel_execute.create_chunks_by_number_col(
task_name IN VARCHAR2,
table_owner IN VARCHAR2,
table_name IN VARCHAR2,
table_column IN VARCHAR2,
chunk_size IN NUMBER);
conn sh/sh@pdbdev
desc user_parallel_execute_chunks
SELECT COUNT(*)
FROM user_parallel_execute_chunks;
BEGIN
dbms_parallel_execute.create_task('SHTASK', 'Demo Task');
dbms_parallel_execute.create_chunks_by_number_col('SHTASK','SH','SALES','PROD_ID',100);
END;
/
SELECT COUNT(*)
FROM user_parallel_execute_chunks;
col task_name format a10
col start_ts format a10
col end_ts format a10
SELECT chunk_id, task_name, status, start_rowid, end_rowid, job_name, start_id
FROM user_parallel_execute_chunks;
SELECT chunk_id, task_name, status, start_id, end_id, start_ts, end_ts
FROM user_parallel_execute_chunks;
exec dbms_parallel_execute.drop_task('SHTASK');
SELECT COUNT(*)
FROM user_parallel_execute_chunks;
Chunks the table associated with the task by ROWID
dbms_parallel_execute.create_chunks_by_rowid(
task_name IN VARCHAR2,
table_owner IN VARCHAR2,
table_name IN VARCHAR2,
by_row IN BOOLEAN,
chunk_size IN NUMBER);
conn sys@pdbdev as sysdba
SELECT DISTINCT salary FROM hr.employees ORDER BY 1;
DECLARE
l_sql_stmt VARCHAR2(1000);
l_try NUMBER;
l_status NUMBER;
BEGIN
-- create a TASK
dbms_parallel_execute.create_task('HRTASK');
-- chunk the table by ROWID
dbms_parallel_execute.create_chunks_by_rowid('HRTASK', 'HR', 'EMPLOYEES', TRUE, 100);
-- execute the DML in parallel
l_sql_stmt := 'UPDATE /*+ ROWID(e) */ HR.EMPLOYEES e
SET e.salary = e.salary - 1
WHERE rowid BETWEEN :start_id AND :end_id';
dbms_parallel_execute.run_task('HRTASK', l_sql_stmt, dbms_sql.native, parallel_level => 4);
-- done with processing; drop the task
dbms_parallel_execute.drop_task('HRTASK');
END;
/
SELECT DISTINCT salary FROM hr.employees ORDER BY 1;
Chunks the table by means of a user-provided SELECT statement
dbms_parallel_execute.create_chunks_by_sql(
task_name IN VARCHAR2,
sql_stmt IN CLOB,
by_rowid IN BOOLEAN);
conn sys@pdbdev as sysdba
SELECT DISTINCT salary FROM hr.employees;
DECLARE
l_chunk_sql VARCHAR2(1000);
l_sql_stmt VARCHAR2(1000);
l_try NUMBER;
l_status NUMBER;
BEGIN
-- create the TASK
dbms_parallel_execute.create_task('UWTASK');
-- chunk the table by MANAGER_ID
l_chunk_sql := 'SELECT distinct manager_id, manager_id FROM hr.employees';
dbms_parallel_execute.create_chunks_by_sql('UWTASK', l_chunk_sql, FALSE);
-- execute the DML in parallel; the WHERE clause contains a condition on manager_id,
-- which is the chunk column, so rows are grouped by manager_id
l_sql_stmt := 'UPDATE HR.EMPLOYEES e
SET e.salary = e.salary - 2
WHERE manager_id BETWEEN :start_id AND :end_id';
dbms_parallel_execute.run_task('UWTASK',l_sql_stmt,DBMS_SQL.NATIVE,parallel_level=>4);
-- if there is an error, resume the task at most 2 times
l_try := 0;
l_status := dbms_parallel_execute.task_status('UWTASK');
WHILE (l_try < 2 AND l_status != dbms_parallel_execute.finished) LOOP
l_try := l_try + 1;
dbms_parallel_execute.resume_task('UWTASK');
l_status := dbms_parallel_execute.task_status('UWTASK');
END LOOP;
-- done with processing; drop the task
dbms_parallel_execute.drop_task('UWTASK');
END;
/
SELECT DISTINCT salary FROM hr.employees;
SELECT chunk_id, task_name, status, start_rowid, end_rowid, job_name, start_id
FROM dba_parallel_execute_chunks;
SELECT chunk_id, task_name, status, start_id, end_id, start_ts, end_ts
FROM dba_parallel_execute_chunks;
DECLARE
 l_sql_stmt    VARCHAR2(1000);
 l_chunk_id    NUMBER;
 l_start_rowid ROWID;
 l_end_rowid   ROWID;
 l_any_rows    BOOLEAN;
 CURSOR c1 IS
 SELECT chunk_id
 FROM user_parallel_execute_chunks
 WHERE task_name = 'mytask'
 AND status IN (dbms_parallel_execute.PROCESSED_WITH_ERROR, dbms_parallel_execute.ASSIGNED);
BEGIN
-- create the Objects, task, and chunk it by ROWID
dbms_parallel_execute.CREATE_TASK ('mytask');
dbms_parallel_execute.CREATE_CHUNKS_BY_ROWID('mytask', 'HR', 'EMPLOYEES', true, 100);
l_sql_stmt := 'UPDATE /*+ ROWID(e) */ HR.EMPLOYEES e
SET e.salary = e.salary + 10
WHERE rowid BETWEEN :start_id AND :end_id';
-- execute the DML in its own framework:
-- process each chunk and commit it;
-- after processing one chunk, get another chunk to process until all are processed
<<main_processing>>
LOOP
-- get a chunk to process; If there's nothing to process, then exit the loop
dbms_parallel_execute.GET_ROWID_CHUNK('mytask', l_chunk_id, l_start_rowid, l_end_rowid, l_any_rows);
IF (l_any_rows = FALSE) THEN
EXIT;
END IF;
-- the chunk is specified by start_id and end_id;
-- bind the start_id and end_id and then execute the statement.
-- If no error occurs, set the chunk status to PROCESSED;
-- if an exception occurs, store the error number/message
-- in the chunk table and continue to process the next chunk
BEGIN
EXECUTE IMMEDIATE l_sql_stmt
USING l_start_rowid, l_end_rowid;
dbms_parallel_execute.set_chunk_status('mytask', l_chunk_id, dbms_parallel_execute.processed);
EXCEPTION WHEN OTHERS THEN
dbms_parallel_execute.set_chunk_status('mytask', l_chunk_id,
dbms_parallel_execute.processed_with_error, SQLCODE, SQLERRM);
END;
-- finished processing one chunk; commit here
COMMIT;
END LOOP;
...
END;
/
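The cursor c1 declared above selects the chunks left in PROCESSED_WITH_ERROR or ASSIGNED status. One possible follow-up (a sketch, not part of the original demo) is to reset those chunks to UNASSIGNED so another pass of the processing loop can pick them up:
BEGIN
  -- hand failed or orphaned chunks back for another pass
  FOR r IN (SELECT chunk_id
            FROM user_parallel_execute_chunks
            WHERE task_name = 'mytask'
            AND status IN (dbms_parallel_execute.processed_with_error, dbms_parallel_execute.assigned))
  LOOP
    dbms_parallel_execute.set_chunk_status('mytask', r.chunk_id, dbms_parallel_execute.unassigned);
  END LOOP;
END;
/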
Retries the given task if the RUN_TASK procedure finished with an error, or resumes the task if a crash occurred.
Overload 1
dbms_parallel_execute.resume_task(
task_name IN VARCHAR2,
sql_stmt IN CLOB,
language_flag IN NUMBER,
edition IN VARCHAR2 DEFAULT NULL,
apply_crossedition_trigger IN VARCHAR2 DEFAULT NULL,
fire_apply_trigger IN BOOLEAN DEFAULT TRUE,
parallel_level IN NUMBER DEFAULT 0,
job_class IN VARCHAR2 DEFAULT 'DEFAULT_JOB_CLASS',
force IN BOOLEAN DEFAULT FALSE);
TBD
Overload 2
dbms_parallel_execute.resume_task(
task_name IN VARCHAR2,
force IN BOOLEAN DEFAULT FALSE);
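For overload 2, which resumes processing with the statement supplied to the original RUN_TASK call, a minimal sketch (assuming a task named UWTASK whose previous run did not finish cleanly) might be:
DECLARE
 l_status NUMBER;
BEGIN
  l_status := dbms_parallel_execute.task_status('UWTASK');
  -- resume only if the previous run failed or crashed
  IF l_status IN (dbms_parallel_execute.finished_with_error, dbms_parallel_execute.crashed) THEN
    dbms_parallel_execute.resume_task('UWTASK');
  END IF;
END;
/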
Executes the specified SQL statement on the chunks in parallel
dbms_parallel_execute.run_task(
task_name IN VARCHAR2,
sql_stmt in CLOB,
language_flag IN NUMBER,
edition IN VARCHAR2 DEFAULT NULL,
apply_crossedition_trigger IN VARCHAR2 DEFAULT NULL,
fire_apply_trigger IN BOOLEAN DEFAULT TRUE,
parallel_level IN NUMBER DEFAULT 0,
job_class IN VARCHAR2 DEFAULT 'DEFAULT_JOB_CLASS');
Sets the status of the given chunk
dbms_parallel_execute.set_chunk_status(
task_name IN VARCHAR2,
chunk_id IN NUMBER,
status IN NUMBER,
err_num IN NUMBER DEFAULT NULL,
err_msg IN VARCHAR2 DEFAULT NULL);
Returns the status of the task
dbms_parallel_execute.task_status(task_name IN VARCHAR2)
RETURN NUMBER;
set serveroutput on
DECLARE
x NUMBER;
BEGIN
x := dbms_parallel_execute.task_status('UWTASK');
dbms_output.put_line(x);
END;
/
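The return value is a bare number; a sketch that decodes it against the package constants (assuming the UWTASK task still exists) makes the output more readable:
set serveroutput on
DECLARE
 x NUMBER;
BEGIN
  x := dbms_parallel_execute.task_status('UWTASK');
  IF x = dbms_parallel_execute.finished THEN
    dbms_output.put_line('FINISHED');
  ELSIF x = dbms_parallel_execute.finished_with_error THEN
    dbms_output.put_line('FINISHED_WITH_ERROR');
  ELSIF x = dbms_parallel_execute.crashed THEN
    dbms_output.put_line('CRASHED');
  ELSE
    dbms_output.put_line('Status code: ' || x);
  END IF;
END;
/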
EBR Related Demo
Update a large table incrementally
conn uwclass/uwclass@pdbdev
desc airplanes
ALTER TABLE airplanes ADD (ebr_demo DATE);
desc airplanes
DECLARE
l_sql_stmt VARCHAR2(512);
l_try NUMBER;
l_status NUMBER;
BEGIN
-- create a TASK
dbms_parallel_execute.create_task('EBRTASK');
-- chunk the table by ROWID
dbms_parallel_execute.create_chunks_by_rowid('EBRTASK', 'UWCLASS', 'AIRPLANES', TRUE, 100000);
-- execute the DML in parallel
l_sql_stmt := 'UPDATE /*+ ROWID(a) */ UWCLASS.AIRPLANES a
SET a.ebr_demo = SYSDATE
WHERE rowid BETWEEN :start_id AND :end_id';