--------------------------------------------------------------------------------
-- @name: SQL Trace too many rows and reads
-- @author: dion cho
-- @note: test case demonstrating unexpectedly high row counts and buffer reads reported for single-row updates via a unique index
--------------------------------------------------------------------------------

----------------------------------------------------
-- Case 1: batched DML
-- Setup: 1000-row table with a unique index on c1, so each update below
-- is a single-row access through the index.
@capture_on
set echo on
set pages 10000
set lines 200
set timing off
set trimspool on


drop table t1 purge;

-- 1000 rows with c1 = c2 = 1..1000
create table t1
as
select level as c1, level as c2
from dual 
connect by level <= 1000;

-- unique index: drives one-row lookups in the update statements below
create unique index t1_n1 on t1(c1);

-- fresh statistics so the optimizer picks the unique index path
exec dbms_stats.gather_table_stats(user, 't1');

-- single update: baseline — trace one unique-index update (expect 1 row, few gets)
exec tpack.begin_diag_trace;

update t1 set c2 = c2 + 1 where c1 = 1;
        
rollback;

exec tpack.end_diag_trace;

-- show the TKPROF-formatted trace for the current session
select * from table(tpack.get_diag_trace(userenv('sid'), 'TKPROF'));



-- bulk update: FORALL batches 1000 single-row updates into one UPDATE execution;
-- the trace aggregates rows and reads for that one statement, which is what
-- this test case is meant to demonstrate
exec tpack.begin_diag_trace;

declare
    -- collections mirroring t1's two columns (filled with all 1000 rows)
    type c1_type is table of t1.c1%type;
    type c2_type is table of t1.c2%type;
    type t1_type is record (c1s c1_type, c2s c2_type); 
    t1s t1_type;
begin
    select * bulk collect into t1s.c1s, t1s.c2s from t1;
    
    -- one batched execution of 1000 unique-key updates
    forall idx in 1 .. 1000 
        update t1 set c2 = t1s.c2s(idx)+1 where c1 = t1s.c1s(idx);
        
    commit;
    
end;
/

exec tpack.end_diag_trace;

select * from table(tpack.get_diag_trace(userenv('sid'), 'TKPROF'));


-- bulk update and original trace file
-- same FORALL bulk update as above, repeated so the raw (unformatted) trace
-- file can be inspected: get_diag_trace with no arguments returns the
-- original trace output instead of the TKPROF summary
exec tpack.begin_diag_trace;

declare
    -- collections mirroring t1's two columns (filled with all 1000 rows)
    type c1_type is table of t1.c1%type;
    type c2_type is table of t1.c2%type;
    type t1_type is record (c1s c1_type, c2s c2_type); 
    t1s t1_type;
begin
    select * bulk collect into t1s.c1s, t1s.c2s from t1;
    
    forall idx in 1 .. 1000 
        update t1 set c2 = t1s.c2s(idx)+1 where c1 = t1s.c1s(idx);
        
    commit;
    
end;
/

exec tpack.end_diag_trace;

-- no arguments: dump the raw trace file rather than the TKPROF report
select * from table(tpack.get_diag_trace);


-----------------------------------------------------------------------------
-- Case 2: consistent reads
-- A second session repeatedly updates one row without committing; session #1's
-- single-row query must then apply many undo records to build a consistent
-- image, so a 1-row unique-index lookup shows a large number of consistent
-- gets in the trace.

drop table t1 purge;

-- wide rows (three char(2000) columns) so every update generates real undo
create table t1(c1 int, c2 char(2000), c3 char(2000), c4 char(2000));

create unique index t1_n1 on t1(c1);

insert into t1 values(1, 'x', 'x', 'x');
insert into t1 values(2, 'x', 'x', 'x');
commit;


-- session #2
-- temp2.sql — contents of the script the second session runs:
-- 1000 updates of the same row, with commit deliberately disabled so the
-- uncommitted changes pile up undo for session #1 to roll back through
--begin
--	for idx in 1 .. 1000 loop
--		update t1 set c2 = idx, c3 = idx, c4 = idx where c1 = 1;
--		--commit;
--	end loop;
--end;
--/

-- "ho" (host) launches session #2 in the background via the OS shell
ho start sqlplus tpack/tpack@ukja1106 @temp2


-- wait till session #2 signals me
-- NOTE(review): presumably temp2.sql raises the signal via tpack — confirm
exec tpack.wait_for_signal;


-- session #1: trace the single-row read while session #2's changes are pending
exec tpack.begin_diag_trace;

select * from t1 where c1 = 1;

exec tpack.end_diag_trace;

select * from table(tpack.get_diag_trace(userenv('sid'), 'TKPROF'));


--------------------------------------------------------------------------
-- Case 3: write consistency
-- A row-level trigger counts how many times row updates fire; the baseline
-- run below establishes that a full-table update fires it exactly once per
-- row (10000). The follow-up run (further down) introduces a concurrent
-- change to show the counter exceeding the row count.

-- BUGFIX: t1 still exists here (created for Case 2 at the top of that
-- section and never dropped), so the CREATE TABLE below would fail with
-- ORA-00955. Drop it first.
drop table t1 purge;

create table t1
as
select level as c1, rpad('x',1000) as c2
from dual
connect by level <= 10000
;

-- session-level counter shared between the trigger and this script.
-- ROBUSTNESS: default to 0 — in Oracle NULL + 1 is NULL, so without a
-- default the counter would stay NULL if the "exec ... := 0" were skipped.
create or replace package pkg_temp
is
	g_update_cnt number := 0;
end;
/


-- create trigger: increment the counter once per updated row
-- (fires again for re-processed rows if the statement restarts)
create or replace trigger trg1
after update on t1
for each row
begin
	pkg_temp.g_update_cnt := pkg_temp.g_update_cnt + 1;
end;
/


exec pkg_temp.g_update_cnt := 0;

-- "c1 = c1" is deliberately always true: update every one of the 10000 rows
update t1 set c2 = rpad('y',1000)
where c1 = c1
;

commit;

-- baseline: expect update cnt = 10000 (one trigger firing per row)
exec dbms_output.put_line('update cnt = ' || pkg_temp.g_update_cnt);


exec pkg_temp.g_update_cnt := 0;

update t1 set c2 = rpad('y',1000)
where c1 = c1
;

-- session #2
update t1 set c1 = c1+1 where c1 = 10000;
commit;

-- session #1 (wait till previous update statement completes)
commit;

exec dbms_output.put_line('update cnt = ' || pkg_temp.g_update_cnt);


@capture_off




