Sunday, July 15, 2012

Clone Schema using Datapump API

There is no single command for cloning a schema/user, but you can use the Data Pump expdp and impdp utilities with the remap_schema option to do it. However, I ran into a situation where the developers did not have access to the OS. The limitation of Data Pump is that it runs as a server-side background process rather than as a client tool, so the dump file is generated on the database server the data is being exported from. If a developer does not have access to the OS, he cannot get to the generated dump file.
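For reference, the usual approach when you do have OS access looks something like this (the schema names, directory, dump file name, and passwords here are just placeholders for illustration):

expdp system/<password> schemas=VIJAY_PROD directory=DATA_PUMP_DIR dumpfile=vijay_prod.dmp logfile=vijay_prod_exp.log
impdp system/<password> remap_schema=VIJAY_PROD:VIJAY_DEV directory=DATA_PUMP_DIR dumpfile=vijay_prod.dmp logfile=vijay_dev_imp.log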

There is a workaround for this situation. Data Pump can import data directly across a database link, without writing a dump file at all. Combine that feature with the DBMS_DATAPUMP API (instead of the command-line utilities) and you have a way to clone a schema entirely from inside the database.
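The database link has to exist before the import is started. A minimal sketch, assuming the link name DBLINKNAME_VK used in the script below and a TNS alias PRODDB for the source database (the connecting user and password are placeholders):

CREATE DATABASE LINK DBLINKNAME_VK CONNECT TO system IDENTIFIED BY my_password USING 'PRODDB';

The link is created in the database where the clone will be built and must connect as a user that can read the source schema's metadata and data.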

You can refer to the Oracle documentation to write better scripts, but below is the one I used. You will need to change the source schema, target schema, database link name, and job name to match your environment.



set serveroutput on
DECLARE
  ind NUMBER;              -- Loop index
  spos NUMBER;             -- String starting position
  slen NUMBER;             -- String length for output
  h1 NUMBER;               -- Data Pump job handle
  percent_done NUMBER;     -- Percentage of job complete
  job_state VARCHAR2(30);  -- To keep track of job state
  le ku$_LogEntry;         -- For WIP and error messages
  js ku$_JobStatus;        -- The job status from get_status
  jd ku$_JobDesc;          -- The job description from get_status
  sts ku$_Status;          -- The status object returned by get_status
  --v_psw dba_users.password%type;
  v_psw varchar2(30) ;  -- password for the new user
  v_row varchar2(8) ;   -- set to '1' for a metadata-only import (ROWS=N)
  cur_scn number ;      -- flashback SCN on the source database
BEGIN
-- uncomment for a metadata-only import (equivalent to ROWS=N)
-- v_row := '1';
--cur_scn:=6030373057 ;
--select password into v_psw from dba_users where username = '&user_c';
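-- change job_name and remote_link (your database link name) to match your environment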
  h1 := Dbms_DataPump.Open(operation => 'IMPORT', job_mode => 'SCHEMA', job_name => 'impdp_vkumar3', version => 'COMPATIBLE', remote_link => 'DBLINKNAME_VK' );

    dbms_datapump.set_parallel(handle => h1, degree => 2);
    -- uncomment if you want the logfile written
    -- Dbms_DataPump.Add_File(handle => h1, filename => 'vkumar_imp1', directory => 'DATA_PUMP_DIR', filetype => 3);
    -- enter the value for the source schema
    dbms_datapump.metadata_filter(handle => h1, name => 'SCHEMA_EXPR', value => 'IN(''VIJAY_PROD'')'); 
-- Exclude statistics to work around Bug 5071931 (DATAPUMP IMPORT WITH REMAP TABLESPACE AND SCHEMA IS VERY SLOW)
  DBMS_DATAPUMP.METADATA_FILTER(handle=> h1, name => 'EXCLUDE_PATH_EXPR', value => '=''TABLE_STATISTICS''');
  DBMS_DATAPUMP.METADATA_FILTER(handle=> h1, name => 'EXCLUDE_PATH_EXPR', value => '=''INDEX_STATISTICS''');
  -- remap source to destination
  Dbms_DataPump.METADATA_REMAP( h1 , 'REMAP_SCHEMA', 'VIJAY_PROD' , 'VIJAY_DEV' );
  -- Use SKIP or REPLACE depending on what should happen if the table already exists.
  Dbms_DataPump.Set_Parameter(handle => h1, name => 'TABLE_EXISTS_ACTION', value => 'SKIP');
  --Dbms_DataPump.Set_Parameter(handle => h1, name => 'FLASHBACK_SCN', value => cur_scn);
-- Omit storage clauses (initial extent, etc.) from the generated DDL
  dbms_datapump.metadata_transform ( h1, 'STORAGE' , 0 , null ) ;
--  dbms_datapump.metadata_transform ( handle => h1, name => 'SEGMENT_ATTRIBUTES' , value => 'n' ) ;
-- Exclude row data (equivalent to ROWS=N) when v_row = '1'
  IF v_row = '1' THEN
        dbms_datapump.data_filter(handle=> h1, name=> 'INCLUDE_ROWS' , value=>0);
  END IF;
-- Start the job. An exception will be returned if something is not set up
-- properly. One possible exception that will be handled differently is the
-- success_with_info exception. success_with_info means the job started
-- successfully, but more information is available through get_status about
-- conditions around the start_job that the user might want to be aware of.
    begin
    dbms_datapump.start_job(h1);
    dbms_output.put_line('Data Pump job started successfully');
    exception
      when others then
        if sqlcode = dbms_datapump.success_with_info_num
        then
          dbms_output.put_line('Data Pump job started with info available:');
          dbms_datapump.get_status(h1,
                                   dbms_datapump.ku$_status_job_error,0,
                                   job_state,sts);
          if (bitand(sts.mask,dbms_datapump.ku$_status_job_error) != 0)
          then
            le := sts.error;
            if le is not null
            then
              ind := le.FIRST;
              while ind is not null loop
                dbms_output.put_line(le(ind).LogText);
                ind := le.NEXT(ind);
              end loop;
            end if;
          end if;
        else
          raise;
        end if;
  end;
-- The import job should now be running. In the following loop, we will monitor
-- the job until it completes. In the meantime, progress information is
-- displayed.

 percent_done := 0;
  job_state := 'UNDEFINED';
  while (job_state != 'COMPLETED') and (job_state != 'STOPPED') loop
    dbms_datapump.get_status(h1,
           dbms_datapump.ku$_status_job_error +
           dbms_datapump.ku$_status_job_status +
           dbms_datapump.ku$_status_wip,-1,job_state,sts);
    js := sts.job_status;
-- If the percentage done changed, display the new value.
     if js.percent_done != percent_done
    then
      dbms_output.put_line('*** Job percent done = ' ||
                           to_char(js.percent_done));
      percent_done := js.percent_done;
    end if;
-- Display any work-in-progress (WIP) or error messages that were received for
-- the job.
      if (bitand(sts.mask,dbms_datapump.ku$_status_wip) != 0)
    then
      le := sts.wip;
    else
      if (bitand(sts.mask,dbms_datapump.ku$_status_job_error) != 0)
      then
        le := sts.error;
      else
        le := null;
      end if;
    end if;
    if le is not null
    then
      ind := le.FIRST;
      while ind is not null loop
        dbms_output.put_line(le(ind).LogText);
        ind := le.NEXT(ind);
      end loop;
    end if;
  end loop;
-- Indicate that the job finished and detach from it.
  dbms_output.put_line('Job has completed');
  dbms_output.put_line('Final job state = ' || job_state);
  dbms_datapump.detach(h1);
-- If v_psw is not 'EXIST' and not 'idem', set it as the new password; otherwise the target user keeps its original password
 -- IF v_psw != 'EXIST' and v_psw != 'idem' THEN
   --     execute immediate 'alter user &user_c identified by '||v_psw||' ' ;
--  END IF;
-- Any exceptions that propagated to this point will be captured. The
-- details will be retrieved from get_status and displayed.
  exception
    when others then
      dbms_output.put_line('Exception in Data Pump job');
      dbms_datapump.get_status(h1,dbms_datapump.ku$_status_job_error,0,
                               job_state,sts);
      if (bitand(sts.mask,dbms_datapump.ku$_status_job_error) != 0)
      then
        le := sts.error;
        if le is not null
        then
          ind := le.FIRST;
          while ind is not null loop
            spos := 1;
            slen := length(le(ind).LogText);
            if slen > 255
            then
              slen := 255;
            end if;
            while slen > 0 loop
              dbms_output.put_line(substr(le(ind).LogText,spos,slen));
              spos := spos + 255;
              slen := length(le(ind).LogText) + 1 - spos;
            end loop;
            ind := le.NEXT(ind);
          end loop;
        end if;
      end if;
END;
/
-- end
-- compile & stats
 exec dbms_output.put_line('** Compilation and Statistics gathering ...') ;
 EXEC DBMS_UTILITY.compile_schema(schema => 'VIJAY_DEV');
 exec DBMS_STATS.GATHER_SCHEMA_STATS ( 'VIJAY_DEV',DBMS_STATS.AUTO_SAMPLE_SIZE,null,'FOR ALL INDEXED COLUMNS SIZE AUTO',2,'ALL',TRUE) ;
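
While the job is running, you can also check its progress from another session instead of relying only on the dbms_output messages, for example with the standard Data Pump dictionary view:

select owner_name, job_name, operation, job_mode, state from dba_datapump_jobs;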