We have found that over time test databases do not match production databases in data or even in data volume unless we refresh them frequently. The problem is getting the development and test staff to allow the downtime necessary to do a database refresh from production, especially for larger databases. So how do we make sure the execution plans in our test environment match what the production execution plans would be? The answer is to take the cost based optimizer statistics from production and put them in the test database.
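Before going through the whole exercise it is worth checking how stale the test statistics actually are. Here is a quick sanity check (a minimal sketch, assuming access to DBA_TABLES; run it in both databases and compare, substituting your schema for &1):
----------------------------------------------------------------------
-- Quick staleness check: run in both databases and compare
----------------------------------------------------------------------
SELECT MIN(last_analyzed) oldest_stats,
MAX(last_analyzed) newest_stats
FROM dba_tables
WHERE owner = '&1' ;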
1. To move the cost based optimizer statistics we first create a stats table and export the CBO stats into it. In our example we will use the SYSTEM schema for the stats table.
----------------------------------------------------------------------
-- Setup our stats table and export our stats for the passed schema
-- into the stats table
----------------------------------------------------------------------
DECLARE
v_tbl_cnt NUMBER ;
BEGIN
-- enable DBMS_OUTPUT
dbms_output.enable (1000000) ;
-- Check if our stats table exists
SELECT count(*)
INTO v_tbl_cnt
FROM dba_tables
WHERE owner = 'SYSTEM'
AND table_name = 'CBO_STATS' ;
-- If the stats table already exists, drop it so that we always
-- recreate it with the correct structure
IF v_tbl_cnt > 0 THEN
dbms_output.put_line('Having to Remove SYSTEM.CBO_STATS as it already Exists.');
-- Drop the existing stats table
DBMS_STATS.drop_stat_table('SYSTEM','CBO_STATS') ;
END IF ;
-- Create our stats table
dbms_output.put_line ('Creating SYSTEM.CBO_STATS Table to Store CBO Stats for Schema &1.') ;
dbms_stats.create_stat_table(ownname => 'SYSTEM', stattab => 'CBO_STATS', tblspace => 'USERS') ;
-- Put our schema stats into our newly created stats table
dbms_output.put_line ('Exporting Schema Stats for &1 into SYSTEM.CBO_STATS table.') ;
dbms_stats.export_schema_stats (ownname => '&1', stattab => 'CBO_STATS', statown => 'SYSTEM') ;
END ;
/
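If you save the block above as a script (the file name export_cbo_stats.sql and the SCOTT schema below are just illustrative), you can run it from SQL*Plus and pass the schema name as the first argument to fill in the &1 substitution variable:
SQL> @export_cbo_stats.sql SCOTT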
2. Next we need to export the stats table we just loaded with the cost based optimizer statistics.
----------------------------------------------------------------------
-- export our stats table so it can be imported to another database
-- We will use the DBMS_DATAPUMP Package to do our export to simplify
-- the process
----------------------------------------------------------------------
DECLARE
v_handle NUMBER ;
ind NUMBER ; -- Loop index
spos NUMBER ; -- String starting position
slen NUMBER ; -- String length for output
percent_done NUMBER ; -- Percentage of job complete
job_state VARCHAR2(30) ; -- To keep track of job state
le ku$_LogEntry ; -- For WIP and error messages
js ku$_JobStatus ; -- The job status from get_status
jd ku$_JobDesc ; -- The job description from get_status
sts ku$_Status ; -- The status object returned by get_status
BEGIN
dbms_output.enable (1000000) ;
-- create our datapump export job
dbms_output.put_line ('Opening Export Job') ;
v_handle := DBMS_DATAPUMP.OPEN (operation => 'EXPORT',
job_mode => 'TABLE',
job_name => 'EXPORT_CBO_STATS_JOB',
version => 'COMPATIBLE');
-- set our logfile
dbms_output.put_line ('Setting Log file for Export Job') ;
DBMS_DATAPUMP.ADD_FILE (handle => v_handle,
filename => 'expdp_export_cbo_stats.log',
directory => 'DMPDIR',
filetype => 3) ;
-- set our dump file
dbms_output.put_line ('Setting Dump file for Export Job') ;
DBMS_DATAPUMP.ADD_FILE (handle => v_handle,
filename => 'expdp_export_cbo_stats.dmp',
directory => 'DMPDIR',
filetype => 1) ;
-- Add the schema filter
DBMS_DATAPUMP.METADATA_FILTER(v_handle,'SCHEMA_EXPR','IN (''SYSTEM'')');
-- Restrict the export to just our stats table
dbms_output.put_line ('Adding filter to only get SYSTEM.CBO_STATS') ;
DBMS_DATAPUMP.METADATA_FILTER (handle => v_handle,
name => 'NAME_EXPR',
value => '= ''CBO_STATS''',
object_type => 'TABLE') ;
-- Start the datapump export job
dbms_output.put_line ('Starting Datapump Export Job to dump SYSTEM.CBO_STATS table.') ;
DBMS_DATAPUMP.START_JOB (v_handle) ;
-- Alternatively, DBMS_DATAPUMP.WAIT_FOR_JOB could replace the monitoring loop below
-- initialize percent_done and job_state values
percent_done := 0 ;
job_state := 'UNDEFINED' ;
-- Loop while the job_state is not completed or stopped
while (job_state != 'COMPLETED') and (job_state != 'STOPPED') loop
dbms_datapump.get_status(v_handle,
dbms_datapump.ku$_status_job_error +
dbms_datapump.ku$_status_job_status +
dbms_datapump.ku$_status_wip,-1,job_state,sts) ;
js := sts.job_status ;
-- If the percentage done changed, display the new value.
if js.percent_done != percent_done then
dbms_output.put_line('*** Job percent done = ' || to_char(js.percent_done)) ;
percent_done := js.percent_done ;
end if ;
-- If any work-in-progress (WIP) or Error messages were received for the job,
-- display them.
if (bitand(sts.mask,dbms_datapump.ku$_status_wip) != 0) then
le := sts.wip ;
else
if (bitand(sts.mask,dbms_datapump.ku$_status_job_error) != 0) then
le := sts.error ;
else
le := null ;
end if ;
end if ;
if le is not null then
ind := le.FIRST ;
-- loop while loop indicator is not null
while ind is not null loop
dbms_output.put_line(le(ind).LogText);
ind := le.NEXT(ind);
end loop;
end if;
end loop;
-- Indicate that the job finished and gracefully detach from it.
dbms_output.put_line('Data Pump Export Job has completed') ;
dbms_output.put_line('Final job state = ' || job_state) ;
dbms_datapump.detach(v_handle) ;
-- Handle exceptions from our export
EXCEPTION
WHEN OTHERS THEN
dbms_output.put_line('Exception in Data Pump job') ;
dbms_datapump.get_status(v_handle,dbms_datapump.ku$_status_job_error, 0, job_state, sts) ;
IF (bitand(sts.mask,dbms_datapump.ku$_status_job_error) != 0) THEN
le := sts.error;
if le is not null then
ind := le.FIRST ;
while ind is not null loop
-- print the message in 255 character chunks
spos := 1;
slen := length(le(ind).LogText) ;
while slen > 0 loop
dbms_output.put_line(substr(le(ind).LogText, spos, 255));
spos := spos + 255;
slen := slen - 255;
end loop;
ind := le.NEXT(ind);
end loop;
end if;
END IF ;
END ;
/
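If the export fails part way through, the job can be left behind and block a re-run because the job name is fixed. A quick check before re-running the script (a sketch, assuming access to DBA_DATAPUMP_JOBS):
----------------------------------------------------------------------
-- Check for a leftover Data Pump job before re-running the export
----------------------------------------------------------------------
SELECT owner_name, job_name, state
FROM dba_datapump_jobs
WHERE job_name IN ('EXPORT_CBO_STATS_JOB','IMPORT_CBO_STATS_JOB') ;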
3. If we have a database link (TARGET) to the database we are moving the stats to, and that database is on a different host, we can transfer the dump file across the database link to a DMPDIR directory object defined on the other side. Otherwise we can copy the file with OS FTP, SCP or copy commands.
----------------------------------------------------------------------
-- Transfer the file via DBMS_FILE_TRANSFER over network link
-- Can change process to just do import over network link
----------------------------------------------------------------------
EXEC DBMS_FILE_TRANSFER.PUT_FILE ('DMPDIR', 'expdp_export_cbo_stats.dmp', 'DMPDIR', 'expdp_export_cbo_stats.dmp', 'TARGET') ;
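The transfer can also go the other way: connected to the destination database, with a database link pointing back at the source (assumed here to be named SOURCE), DBMS_FILE_TRANSFER.GET_FILE pulls the dump file instead of pushing it. A sketch, again assuming DMPDIR directory objects exist on both sides:
----------------------------------------------------------------------
-- Alternative: pull the file from the destination side
----------------------------------------------------------------------
EXEC DBMS_FILE_TRANSFER.GET_FILE ('DMPDIR', 'expdp_export_cbo_stats.dmp', 'SOURCE', 'DMPDIR', 'expdp_export_cbo_stats.dmp') ;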
4. Now that the table has been exported and the dump file transferred to the destination, the stats table can be removed from the source database.
----------------------------------------------------------------------
-- Clean up by Removing our stats table now that we have exported it
----------------------------------------------------------------------
EXEC DBMS_STATS.drop_stat_table('SYSTEM','CBO_STATS') ;
5. Connect to the destination database so that we can import the stats table.
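If the destination database does not already have a DMPDIR directory object pointing at the dump file location, create one first (a sketch; the path below is an assumption, adjust it to wherever the dump file actually sits):
----------------------------------------------------------------------
-- Create the directory object on the destination if needed
----------------------------------------------------------------------
CREATE OR REPLACE DIRECTORY DMPDIR AS '/u01/app/oracle/dpdump' ;
GRANT READ, WRITE ON DIRECTORY DMPDIR TO SYSTEM ;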
-- In our example the two databases are on the same host, so our directory object is still valid
-- Import our stats table into the other database for stats import use
DECLARE
ind NUMBER ; -- Loop index
spos NUMBER ; -- String starting position
slen NUMBER ; -- String length for output
v_handle NUMBER ; -- Data Pump job handle
percent_done NUMBER ; -- Percentage of job complete
job_state VARCHAR2(30) ; -- To keep track of job state
le ku$_LogEntry ; -- For WIP and error messages
js ku$_JobStatus ; -- The job status from get_status
jd ku$_JobDesc ; -- The job description from get_status
sts ku$_Status ; -- The status object returned by get_status
BEGIN
dbms_output.enable (1000000) ;
-- Create a Data Pump job to do a "full" import, as only our stats
-- table is in the dump file
v_handle := DBMS_DATAPUMP.OPEN (operation => 'IMPORT',
job_mode => 'FULL',
job_name => 'IMPORT_CBO_STATS_JOB',
version => 'COMPATIBLE');
-- set our logfile for import
DBMS_DATAPUMP.ADD_FILE (handle => v_handle,
filename => 'expdp_import_cbo_stats.log',
directory => 'DMPDIR',
filetype => 3) ;
-- set our dump file
DBMS_DATAPUMP.ADD_FILE (handle => v_handle,
filename => 'expdp_export_cbo_stats.dmp',
directory => 'DMPDIR',
filetype => 1) ;
-- If the table already exists in the destination schema, replace it before
-- loading to ensure the structure is correct; any prior data is not important here.
DBMS_DATAPUMP.SET_PARAMETER(v_handle,'TABLE_EXISTS_ACTION','REPLACE');
-- Start the job. An exception is returned if something is not set up properly.
DBMS_DATAPUMP.START_JOB(v_handle);
-- The import job should now be running. In the following loop, the job is
-- monitored until it completes. In the meantime, progress information is
-- displayed. Note: this is identical to the export example.
-- initialize percent_done and job_state values
percent_done := 0 ;
job_state := 'UNDEFINED' ;
-- Loop while the job_state is not completed or stopped
while (job_state != 'COMPLETED') and (job_state != 'STOPPED') loop
dbms_datapump.get_status(v_handle,
dbms_datapump.ku$_status_job_error +
dbms_datapump.ku$_status_job_status +
dbms_datapump.ku$_status_wip,-1,job_state,sts) ;
js := sts.job_status ;
-- If the percentage done changed, display the new value.
if js.percent_done != percent_done then
dbms_output.put_line('*** Job percent done = ' || to_char(js.percent_done)) ;
percent_done := js.percent_done ;
end if ;
-- If any work-in-progress (WIP) or Error messages were received for the job,
-- display them.
if (bitand(sts.mask,dbms_datapump.ku$_status_wip) != 0) then
le := sts.wip ;
else
if (bitand(sts.mask,dbms_datapump.ku$_status_job_error) != 0) then
le := sts.error ;
else
le := null ;
end if ;
end if ;
if le is not null then
ind := le.FIRST ;
-- loop while loop indicator is not null
while ind is not null loop
dbms_output.put_line(le(ind).LogText);
ind := le.NEXT(ind);
end loop;
end if;
end loop;
-- Indicate that the job finished and gracefully detach from it.
dbms_output.put_line('Data Pump Import Job has completed') ;
dbms_output.put_line('Final job state = ' || job_state) ;
dbms_datapump.detach(v_handle) ;
-- Handle exceptions from our import
EXCEPTION
WHEN OTHERS THEN
dbms_output.put_line('Exception in Data Pump job') ;
dbms_datapump.get_status(v_handle,dbms_datapump.ku$_status_job_error, 0, job_state, sts) ;
IF (bitand(sts.mask,dbms_datapump.ku$_status_job_error) != 0) THEN
le := sts.error;
if le is not null then
ind := le.FIRST ;
while ind is not null loop
-- print the message in 255 character chunks
spos := 1;
slen := length(le(ind).LogText);
while slen > 0 loop
dbms_output.put_line(substr(le(ind).LogText, spos, 255));
spos := spos + 255;
slen := slen - 255;
end loop;
ind := le.NEXT(ind);
end loop;
end if;
END IF ;
END ;
/
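Before importing the statistics into the data dictionary, a quick sanity check (a sketch) confirms the stats rows actually made it across:
SELECT COUNT(*) FROM SYSTEM.CBO_STATS ;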
6. If you are bringing the stats table from a prior version of Oracle into a newer version, you will need to upgrade the stats table.
------------------------------------------------------------------
-- If moving stats from a previous version of database need to upgrade stats table
------------------------------------------------------------------
exec dbms_stats.upgrade_stat_table ('SYSTEM', 'CBO_STATS') ;
7. Lastly, import the statistics into the destination schema.
------------------------------------------------------------------
-- Import the schema stats exported from the other database
------------------------------------------------------------------
EXEC DBMS_STATS.import_schema_stats('&1','CBO_STATS',NULL,'SYSTEM');
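To verify the import took effect (a sketch, again using the &1 substitution variable for the schema), LAST_ANALYZED should now show the gather times from the source database rather than the local ones:
------------------------------------------------------------------
-- Verify the imported stats took effect
------------------------------------------------------------------
SELECT table_name, num_rows, last_analyzed
FROM dba_tables
WHERE owner = '&1'
ORDER BY table_name ;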