Generating table row-count SQL with a shell script

At work we often need to know how many rows a table holds, and the usual way is a statement such as select count(1) or select count(*).
Different tables, however, deserve slightly different treatment if the count is to run as efficiently as possible: when a table has a primary key, an index fast full scan is likely to beat a full table scan, and when a table is very large and has no suitable constraint to scan, a parallel full scan can speed things up, and so on.
Below is an example of doing such counts with a shell script. For a single small table the effort hardly seems worth it, but when the data volumes are large this kind of analysis pays off.
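
The choice between an index fast full scan and a parallel full scan can be driven by the table's segment size. Here is a minimal sketch of the kind of size check the script below automates, using the same DB_CONN_STR and SH_DB_SID connection variables the script relies on; OWNER and TABLE_NAME are placeholders for your own schema and table.

# Look up the segment size (in GB) before deciding how to count.
# OWNER and TABLE_NAME are placeholders for your own schema and table.
sqlplus -s ${DB_CONN_STR}@${SH_DB_SID} <<EOF
set head off
select segment_name, round(sum(bytes)/1024/1024/1024, 3) size_gb
  from dba_segments
 where owner = upper('OWNER')
   and segment_name = upper('TABLE_NAME')
 group by segment_name;
EOF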

Take the table customer as an example. It is small (the number after the table name in the output is its segment size in GB, well under 1 GB here), so the count can simply go through its primary key index.

> ksh get_tab_cnt.sh prdappo customer
 
 
************************************************

CUSTOMER               .859375


CUSTOMER

select  /*+ index_ffs(CUSTOMER,CUSTOMER_pk ) parallel_index(CUSTOMER,CUSTOMER_pk,1) */  'CUSTOMER,', count(*) from CUSTOMER  ;

If the table is larger, parallelism is enabled at the same time: GREEMENT below is roughly 10 GB, so the generated hint asks for an index fast full scan with a parallel degree of 4.

 
************************************************

GREEMENT            10.359375


GREEMENT

select  /*+ index_ffs(GREEMENT,GREEMENT_pk ) parallel_index(GREEMENT,GREEMENT_pk,4) */  'GREEMENT,', count(*) from GREEMENT  ;
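
The script only prints the generated count statements; they still have to be executed to get the actual counts. One simple way to do that, assuming the output is captured to a file (the file name here is just an example), is to filter out the select statements and replay them through sqlplus:

# Keep only the generated select statements, append an exit, and run them.
ksh get_tab_cnt.sh prdappo greement | grep -i '^select' > greement_cnt.sql
echo "exit;" >> greement_cnt.sql
sqlplus -s ${DB_CONN_STR}@${SH_DB_SID} @greement_cnt.sql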

The corresponding script is as follows:

#!/bin/ksh
#get_tab_cnt_sql
DATE=`/bin/date '+%y%m%d_%H%M%S'`

constr1=$DB_CONN_STR@$SH_DB_SID
constr1=`echo ${constr1} | tr '[:lower:]' '[:upper:]'`
# Optional third argument: a flashback timestamp (mm/dd/yyyy hh24:mi:ss); when it
# is supplied, every generated count statement is qualified with AS OF TIMESTAMP.
timestamp=$3
time_vs=""
if [ "${timestamp}" = "" ]
then
        echo " "
        echo " "
else
        time_vs=" as of timestamp to_date('${timestamp}','mm/dd/yyyy hh24:mi:ss') "
fi

user1=`echo $constr1 | awk -F "/" '{print $1}'`
inst1=`echo $constr1 | awk -F "@" '{print $2}'`
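
# If the connect string carried no SID, fall back to $ORACLE_SID.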

if [[ -z ${inst1} ]];then
inst1=$ORACLE_SID
constr1=${constr1}@${inst1}
fi

function check_connectivity
{
  Num=`echo show user | $ORACLE_HOME/bin/sqlplus -s $1| grep -i 'USER ' | wc -l`
  if [ $Num -gt 0 ]
        then
                ## ok - instance is accessible
               echo '************************************************'
        else
                ## inst is Inaccessible
                echo Connection Details: `echo $1 ` is Inaccessible...
                echo '************************************************'
                exit;
        fi
}


check_connectivity $constr1
sleep 1;

sleep 1;


mkdir `pwd`/Get_tab_cnt_${user1}_${inst1}_${DATE}
mkdir `pwd`/Get_tab_cnt_${user1}_${inst1}_${DATE}/List
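
# Size class boundaries in GB (compared against the dba_segments figure gathered
# below) and the parallel degree applied to each class when the count SQL is built.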

small2='1'
small='10'
medium='30'
big_par=8
medium_par=4
small_par=2
small2_par=1

COUNT_DIR=`pwd`/Get_tab_cnt_${user1}_${inst1}_${DATE}

 

exclude_tab=x
sqlplus -s ${constr1} <<EOF
set head off
set line 100
set pages 50000
set long 10000
col segment_name for a40
spool ${COUNT_DIR}/List/${user1}_${inst1}_all_tab_size_temp.lst
select distinct segment_name,to_number(to_char(sum(bytes/1024/1024/1024))) from dba_segments
where owner=upper('$1') and segment_name =upper('$2')  group by segment_name order by 2 desc;
spool off;
spool ${COUNT_DIR}/List/${user1}_${inst1}_all_tab_with_pk.lst
select table_name  from dba_tables where owner=upper('$1')  and table_name in ( select table_name from dba_constraints  where owner=upper('$1')  and table_name =upper('$2')
and constraint_type='P');
spool off;

EOF
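
# At this point two list files exist under ${COUNT_DIR}/List:
#   *_all_tab_size_temp.lst - segment name and its size in GB
#   *_all_tab_with_pk.lst   - the table name if it has a primary key constraint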


################## Getting Big Tables ######################
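
# Strip sqlplus noise (prompts, error text, banners) from the spooled size list,
# leaving only "SEGMENT_NAME  SIZE_GB" lines, then split the tables into four
# size classes with awk.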

sed  '/^$/d' ${COUNT_DIR}/List/${user1}_${inst1}_all_tab_size_temp.lst | grep -v SQL | grep -v select | grep -v ERROR | grep -v ORA- | grep -v '*' > ${COUNT_DIR}/List/${user1}_${inst1}_all_tab_size.lst


awk -v medium=${medium} ' $2 > medium { print $1 } ' ${COUNT_DIR}/List/${user1}_${inst1}_all_tab_size.lst > ${COUNT_DIR}/List/${user1}_${inst1}_big_tab.lst
awk -v medium=${medium} -v small=${small} ' $2 <= medium && $2 > small { print $1 } ' ${COUNT_DIR}/List/${user1}_${inst1}_all_tab_size.lst > ${COUNT_DIR}/List/${user1}_${inst1}_medium_tab.lst
awk -v small=${small} -v small2=${small2} ' $2 <= small && $2 > small2 { print $1 } ' ${COUNT_DIR}/List/${user1}_${inst1}_all_tab_size.lst > ${COUNT_DIR}/List/${user1}_${inst1}_small_tab.lst
awk -v small2=${small2} ' $2 <= small2 { print $1 } ' ${COUNT_DIR}/List/${user1}_${inst1}_all_tab_size.lst > ${COUNT_DIR}/List/${user1}_${inst1}_small2_tab.lst


###############################( Generating Count Files ( Big ) #############################################
while read table
do
        is_pk=`grep -i ${table} ${COUNT_DIR}/List/${user1}_${inst1}_all_tab_with_pk.lst | wc -l `
        is_pk=`echo $is_pk`
        if [ ${is_pk} -ne 0 ]
        then
                echo "select  /*+ index_ffs(${table},${table}_pk ) parallel_index(${table},${table}_pk,${big_par}) */  '${table},', count(*) from ${table} ${time_vs} ;"  >> ${COUNT_DIR}/List/${user1}_${inst1}_big_tab_count.lst
        else
                echo "select  /*+ PARALLEL(${table},${big_par}) */ '${table} ,' , count (1) from  ${table} ${time_vs} ;" >> ${COUNT_DIR}/List/${user1}_${inst1}_big_tab_count.lst
        fi
done < ${COUNT_DIR}/List/${user1}_${inst1}_big_tab.lst


################################ Generating Count Files ( Medium ) #############################################

while read table
do
        #echo " Table Name is : $table "
        is_pk=`grep -i ${table} ${COUNT_DIR}/List/${user1}_${inst1}_all_tab_with_pk.lst | wc -l `
        is_pk=`echo $is_pk`
        if [ ${is_pk} -ne 0 ]
        then
                echo "select  /*+ index_ffs(${table},${table}_pk ) parallel_index(${table},${table}_pk,${medium_par}) */  '${table},', count(*) from ${table} ${time_vs} ;"  >> ${COUNT_DIR}/List/${user1}_${inst1}_medium_tab_count.lst
        else
                echo "select   /*+ PARALLEL(${table},${medium_par}) */ '${table} ,' , count (1) from  ${table} ${time_vs} ;" >> ${COUNT_DIR}/List/${user1}_${inst1}_medium_tab_count.lst
        fi
done < ${COUNT_DIR}/List/${user1}_${inst1}_medium_tab.lst

################################ Generating Count Files ( Small ) #############################################

while read table
do
        #echo " Table Name is : $table "
        is_pk=`grep -i ${table} ${COUNT_DIR}/List/${user1}_${inst1}_all_tab_with_pk.lst | wc -l `
        is_pk=`echo $is_pk`
        if [ ${is_pk} -ne 0 ]
        then
                echo "select  /*+ index_ffs(${table},${table}_pk ) parallel_index(${table},${table}_pk,${small_par}) */  '${table},', count(*) from ${table} ${time_vs} ;"  >> ${COUNT_DIR}/List/${user1}_${inst1}_small_tab_count.lst
        else
                echo "select   /*+ PARALLEL(${table},${small_par}) */ '${table} ,' , count (1) from  ${table} ${time_vs} ;" >> ${COUNT_DIR}/List/${user1}_${inst1}_small_tab_count.lst
        fi
done < ${COUNT_DIR}/List/${user1}_${inst1}_small_tab.lst

################################ Generating Count Files (Very Small ) #############################################

while read table
do
        #echo " Table Name is : $table "
        is_pk=`grep -i ${table} ${COUNT_DIR}/List/${user1}_${inst1}_all_tab_with_pk.lst | wc -l `
        is_pk=`echo $is_pk`
        if [ ${is_pk} -ne 0 ]
        then
                echo "select  /*+ index_ffs(${table},${table}_pk ) parallel_index(${table},${table}_pk,${small2_par}) */  '${table},', count(*) from ${table} ${time_vs} ;"  >> ${COUNT_DIR}/List/${user1}_${inst1}_small2_tab_count.lst
        else
                echo "select   /*+ PARALLEL(${table},${small2_par}) */ '${table} ,' , count (1) from  ${table} ${time_vs} ;" >> ${COUNT_DIR}/List/${user1}_${inst1}_small2_tab_count.lst
        fi
done < ${COUNT_DIR}/List/${user1}_${inst1}_small2_tab.lst

 

 

rm  ${COUNT_DIR}/List/*temp.lst


#################### get table list from schemas ##########################
touch ${COUNT_DIR}/${user1}_${inst1}_final_tab_cnt_stats.sql
schema_con=${user1}_${inst1}
function get_final_tab_cnt_stats
{
schema_type=$1
if [ -s  ${COUNT_DIR}/List/${schema_con}_big_tab_count.lst ];then
cat ${COUNT_DIR}/List/${schema_con}_big_tab_count.lst >> ${COUNT_DIR}/${schema_con}_final_tab_cnt_stats.sql
fi
if  [ -s ${COUNT_DIR}/List/${schema_con}_medium_tab_count.lst ];then
cat ${COUNT_DIR}/List/${schema_con}_medium_tab_count.lst >> ${COUNT_DIR}/${schema_con}_final_tab_cnt_stats.sql
fi
if [ -s ${COUNT_DIR}/List/${schema_con}_small2_tab_count.lst ];then
cat ${COUNT_DIR}/List/${schema_con}_small2_tab_count.lst >> ${COUNT_DIR}/${schema_con}_final_tab_cnt_stats.sql
fi
if [ -s ${COUNT_DIR}/List/${schema_con}_small_tab_count.lst ];then
cat ${COUNT_DIR}/List/${schema_con}_small_tab_count.lst >> ${COUNT_DIR}/${schema_con}_final_tab_cnt_stats.sql
fi
}

get_final_tab_cnt_stats $schema_con

cat ${COUNT_DIR}/${schema_con}_final_tab_cnt_stats.sql
rm -rf ${COUNT_DIR}
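
Because the generated statements carry an AS OF TIMESTAMP clause when a third argument is supplied, the counts can also be taken against a flashback point in time. The timestamp value below is arbitrary; it only has to match the mm/dd/yyyy hh24:mi:ss mask used in the script:

# Count the rows of prdappo.customer as they stood at the given point in time.
ksh get_tab_cnt.sh prdappo customer '06/10/2014 09:00:00'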

 
