Sqoop export script (to a different database: Hive to Oracle)
#!/bin/sh
source ExitCodeCheck.sh
opts=$@
getparam(){
arg=$1
echo $opts |xargs -n1 |cut -b 2- |awk -F'=' '{if($1=="'"$arg"'") print $2}'
}
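# Example (hypothetical invocation): for "./script.sh -inc_start=20170701 -hdp_queue=root.queue_0101_01",
# each argument is split onto its own line by xargs, the leading "-" is stripped by cut,
# and awk prints the value whose key matches, so `getparam inc_start` echoes 20170701.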
IncStart=`getparam inc_start`
IncEnd=`getparam inc_end`
oracle_connection=`getparam jdbc_str`
oracle_username=`getparam db_user`
oracle_password=`getparam db_psw`
dataName=`getparam db_sid`
queueName=`getparam hdp_queue`
hdfshostname=`getparam hdfs_host`
IncStartYear=`echo ${IncStart:0:4}`;
IncStartMonth=`echo ${IncStart:4:2}`;
IncStartDay=`echo ${IncStart:6:2}`;
IncStartAll=${IncStartYear}"-"${IncStartMonth}"-"${IncStartDay}" 00:00:00.0";
IncStartAllFormat=${IncStartYear}"-"${IncStartMonth}"-"${IncStartDay};
IncEndYear=`echo ${IncEnd:0:4}`;
IncEndMonth=`echo ${IncEnd:4:2}`;
IncEndDay=`echo ${IncEnd:6:2}`;
IncEndAll=${IncEndYear}"-"${IncEndMonth}"-"${IncEndDay}" 00:00:00.0";
twoDayAgo=`date -d "$IncStart 2 days ago " +%Y%m%d `;
twoDayAgoYear=`echo ${twoDayAgo:0:4}`;
twoDayAgoMonth=`echo ${twoDayAgo:4:2}`;
twoDayAgoDay=`echo ${twoDayAgo:6:2}`;
twoDayAgoAll=${twoDayAgoYear}"-"${twoDayAgoMonth}"-"${twoDayAgoDay}" 00:00:00.0";
twoDayAgoAllFormat=${twoDayAgoYear}"-"${twoDayAgoMonth}"-"${twoDayAgoDay};
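# Example values (assuming -inc_start=20170706 -inc_end=20170707 were passed in):
#   IncStartAll        = "2017-07-06 00:00:00.0"
#   IncStartAllFormat  = "2017-07-06"
#   IncEndAll          = "2017-07-07 00:00:00.0"
#   twoDayAgo          = 20170704   (GNU date: "$IncStart 2 days ago")
#   twoDayAgoAllFormat = "2017-07-04"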
job_name=$0
# Target Oracle table name for the export
export_table_name=NCHRMS_ORGANIZATION_INTF;
# Temporary file name for the data exported to Oracle
sqoop_export_data_filename=${export_table_name};
# Oracle column list for the export
export_table_columns=ORG_ID,PARENT_ORG_ID,ORG_CODE,ORG_EN_NAME,ORG_CH_NAME,ORG_TAG,EFFECTIVE_DATE,LAPSED_DATE,PLACE_CODE,ORG_BIZ_CODE,IS_ACTIVE,ORG_LEVEL,ORG_SERIES,CREATED_BY,CREATED_DATE,UPDATED_BY,UPDATED_DATE
# Temporary HDFS directory for the data exported to Oracle
sqoop_export_data_dir=/apps-data/hduser0101/sx_360_safe/export/${sqoop_export_data_filename};
hadoop dfs -rmr ${sqoop_export_data_dir};
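# Note: "hadoop dfs -rmr" is deprecated on newer Hadoop releases; an equivalent command
# (a sketch, same path variable) would be: hadoop fs -rm -r ${sqoop_export_data_dir}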
# Generate the temporary data to be exported to Oracle
hive -v -e "set mapred.job.queue.name=${queueName};
set mapred.job.name=${job_name}_1;
use an_pafc_safe;
insert overwrite directory '${sqoop_export_data_dir}'
select
ORG_ID,
PARENT_ORG_ID,
ORG_CODE,
ORG_EN_NAME,
ORG_CH_NAME,
ORG_TAG,
EFFECTIVE_DATE,
LAPSED_DATE,
PLACE_CODE,
ORG_BIZ_CODE,
IS_ACTIVE,
ORG_LEVEL,
ORG_SERIES,
CREATED_BY,
CREATED_DATE,
UPDATED_BY,
UPDATED_DATE
from lnc_cris_safe.nchrms_organization_intf ;";
exitCodeCheck $?
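# Optional sanity check (a sketch, not part of the original job): Hive writes this directory
# with Ctrl-A (\001) field delimiters, which is why the sqoop export below passes
# --input-fields-terminated-by '\001'. To eyeball a few rows:
# hadoop fs -cat ${sqoop_export_data_dir}/* | head -3 | cat -A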
# First delete the existing data in the target database
sqoop eval -Dmapred.job.queue.name=${queueName} \
--connect ${oracle_connection} \
--username ${oracle_username} \
--password ${oracle_password} \
--verbose \
--query "delete from ${export_table_name}";
exitCodeCheck $?
# Delete the target data again (supports re-running the script)
sqoop eval -Dmapred.job.queue.name=${queueName} \
--connect ${oracle_connection} \
--username ${oracle_username} \
--password ${oracle_password} \
--verbose \
--query "delete from ${export_table_name}";
exitCodeCheck $?
# Then export the data
sqoop export -D mapred.job.name=${job_name}_2 -D sqoop.export.statements.per.transaction=4500 -D mapreduce.map.tasks=1 -D mapred.map.max.attempts=1 -D mapred.reduce.max.attempts=1 -D mapreduce.map.maxattempts=1 -D mapreduce.reduce.maxattempts=1 -D mapred.job.queue.name=${queueName} \
--connect ${oracle_connection} \
--username ${oracle_username} \
--password ${oracle_password} \
--export-dir ${sqoop_export_data_dir} \
--verbose \
--num-mappers 1 \
--table ${export_table_name} \
--columns ${export_table_columns} \
--input-fields-terminated-by '\001' \
--input-lines-terminated-by '\n' \
--input-null-string '\\N' \
--input-null-non-string '\\N';
exitCodeCheck $?
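A quick way to verify the export (a sketch, not part of the original script) is to count the rows now in the Oracle target table with sqoop eval, reusing the same connection parameters:
sqoop eval --connect ${oracle_connection} --username ${oracle_username} --password ${oracle_password} \
--query "select count(*) from ${export_table_name}"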
Sqoop export script (Hive to PostgreSQL)
#!/bin/sh
source ExitCodeCheck.sh
opts=$@
getparam(){
arg=$1
echo $opts |xargs -n1 |cut -b 2- |awk -F'=' '{if($1=="'"$arg"'") print $2}'
}
IncStart=`getparam inc_start`
IncEnd=`getparam inc_end`
pg_connection=`getparam jdbc_str`
pg_username=`getparam db_user`
pg_password=`getparam db_psw`
dataName=`getparam db_sid`
queueName=`getparam hdp_queue`
hdfshostname=`getparam hdfs_host`
IncStartYear=`echo ${IncStart:0:4}`;
IncStartMonth=`echo ${IncStart:4:2}`;
IncStartDay=`echo ${IncStart:6:2}`;
IncStartAll=${IncStartYear}"-"${IncStartMonth}"-"${IncStartDay}" 00:00:00.0";
IncStartAllFormat=${IncStartYear}"-"${IncStartMonth}"-"${IncStartDay};
IncEndYear=`echo ${IncEnd:0:4}`;
IncEndMonth=`echo ${IncEnd:4:2}`;
IncEndDay=`echo ${IncEnd:6:2}`;
IncEndAll=${IncEndYear}"-"${IncEndMonth}"-"${IncEndDay}" 00:00:00.0";
twoDayAgo=`date -d "$IncStart 2 days ago " +%Y%m%d `;
twoDayAgoYear=`echo ${twoDayAgo:0:4}`;
twoDayAgoMonth=`echo ${twoDayAgo:4:2}`;
twoDayAgoDay=`echo ${twoDayAgo:6:2}`;
twoDayAgoAll=${twoDayAgoYear}"-"${twoDayAgoMonth}"-"${twoDayAgoDay}" 00:00:00.0";
twoDayAgoAllFormat=${twoDayAgoYear}"-"${twoDayAgoMonth}"-"${twoDayAgoDay};
job_name=$0
# Target PostgreSQL table name for the export
export_table_name=n_par_life_hrm_org;
# Temporary file name for the data exported to PostgreSQL
sqoop_export_data_filename=${export_table_name};
# PostgreSQL column list for the export
export_table_columns=statis_date,um_num,um_name,group_id,group_name,department_id,department_name,center_id,center_name
# Temporary HDFS directory for the data exported to PostgreSQL
sqoop_export_data_dir=/apps-data/hduser0101/an_pafc_safe/export/${sqoop_export_data_filename};
hadoop dfs -rmr ${sqoop_export_data_dir};
# Generate the temporary data to be exported to PostgreSQL
hive -v -e "set mapred.job.queue.name=${queueName};
set mapred.job.name=${job_name}_1;
use an_pafc_safe;
insert overwrite directory '${sqoop_export_data_dir}'
select
'${IncStartAllFormat}' as statis_date,
um_num,
um_name,
group_id,
group_name,
department_id,
department_name,
center_id,
center_name
from lnc_hdw_safe.n_par_life_hrm_org ;";
exitCodeCheck $?
# First delete the data from two days ago in the target database
sqoop eval -Dmapred.job.queue.name=${queueName} \
--connect ${pg_connection} \
--username ${pg_username} \
--password ${pg_password} \
--query "delete from ${export_table_name} A where a.statis_date=date('${twoDayAgoAllFormat}')";
exitCodeCheck $?
# Delete the data for the current run date (supports re-running the script)
sqoop eval -Dmapred.job.queue.name=${queueName} \
--connect ${pg_connection} \
--username ${pg_username} \
--password ${pg_password} \
--query "delete from ${export_table_name} A where a.statis_date=date('${IncStartAllFormat}')";
exitCodeCheck $?
# Then export the data
sqoop export -D mapred.job.name=${job_name}_2 -D sqoop.export.statements.per.transaction=4500 -D mapreduce.map.tasks=1 -D mapred.map.max.attempts=1 -D mapred.reduce.max.attempts=1 -D mapreduce.map.maxattempts=1 -D mapreduce.reduce.maxattempts=1 -D mapred.job.queue.name=${queueName} \
--connect ${pg_connection} \
--username ${pg_username} \
--password ${pg_password} \
--export-dir ${sqoop_export_data_dir} \
--verbose \
--num-mappers 1 \
--table ${export_table_name} \
--columns ${export_table_columns} \
--input-fields-terminated-by '\001' \
--input-lines-terminated-by '\n' \
--input-null-string '\\N' \
--input-null-non-string '\\N';
exitCodeCheck $?
Sqoop import script (Oracle to Hive)
#!/bin/sh
source ExitCodeCheck.sh
opts=$@
getparam(){
arg=$1
echo $opts |xargs -n1 |cut -b 2- |awk -F'=' '{if($1=="'"$arg"'") print $2}'
}
IncStart=`getparam inc_start`
IncEnd=`getparam inc_end`
oracle_connection=`getparam jdbc_str`
oracle_username=`getparam db_user`
oracle_password=`getparam db_psw`
dataName=`getparam db_sid`
queueName=`getparam hdp_queue`
hdfshostname=`getparam hdfs_host`;
IncStartYear=`echo ${IncStart:0:4}`;
IncStartMonth=`echo ${IncStart:4:2}`;
IncStartDay=`echo ${IncStart:6:2}`;
IncStartAll=${IncStartYear}"-"${IncStartMonth}"-"${IncStartDay}" 00:00:00.0";
IncStartAllFormat=${IncStartYear}"-"${IncStartMonth}"-"${IncStartDay};
IncEndYear=`echo ${IncEnd:0:4}`;
IncEndMonth=`echo ${IncEnd:4:2}`;
IncEndDay=`echo ${IncEnd:6:2}`;
IncEndAll=${IncEndYear}"-"${IncEndMonth}"-"${IncEndDay}" 00:00:00.0";
IncEndAllFormat=${IncEndYear}"-"${IncEndMonth}"-"${IncEndDay};
OneDayAgo=`date -d "$IncStart 1 days ago " +%Y%m%d `;
OneDayAgoYear=`echo ${OneDayAgo:0:4}`;
OneDayAgoMonth=`echo ${OneDayAgo:4:2}`;
OneDayAgoDay=`echo ${OneDayAgo:6:2}`;
OneDayAgoAll=${OneDayAgoYear}"-"${OneDayAgoMonth}"-"${OneDayAgoDay}" 00:00:00.0";
OneDayAgoAllFormat=${OneDayAgoYear}"-"${OneDayAgoMonth}"-"${OneDayAgoDay};
# Job name is taken from the script name
job_name=$0
# Target Hive table name
hive_table=AGG_MOBILE_HR_DEPT_ALL
# Temporary HDFS directory
target_dir=/apps-data/hduser0101/an_pafc_safe/import/${hive_table}
# Remove the temporary directory (supports re-running the script)
hadoop dfs -rmr ${target_dir}
sqoop import -D mapred.job.queue.name=${queueName} -D mapred.job.name=${job_name} \
--connect ${oracle_connection} \
--username ${oracle_username} \
--password ${oracle_password} \
--query "SELECT \
PARENT_DEPARTMENT_CODE \
,DEPARTMENT_CODE \
,DEPARTMENT_NAME \
,DEPARTMENT_LEVEL \
,OPTION_ITEM \
,CR_MONTH \
,CR_YEAR \
,COMP_LY \
,COMP_YEAR_END \
,TLL_MONTH \
,TLL_MONTH_NUM \
,TLL_YEAR \
,TLL_YEAR_NUM \
,ZYL_MONTH \
,ZYL_MONTH_NUM \
,ZYL_YEAR \
,ZYL_YEAR_NUM \
,PLAN_DATA_M \
,PLAN_DATA_Y \
,C_PLAN_DATA_M \
,C_PLAN_DATA_Y \
,CR_MONTH_LY \
,CR_YEAR_LY \
,CREATED_BY \
,DATE_CREATED \
,UPDATED_BY \
,DATE_UPDATED \
,ORDER_NO \
,ZY_Y_CUR \
,TL_Y_CUR \
,LM_HR \
,ZY_M_LY \
,ZY_Y_LY \
,TL_M_LY \
,TL_Y_LY \
FROM AGG_MOBILE_HR_DEPT_ALL \
WHERE CANC_DATE = to_date('${IncStart}','yyyymmdd') \
and \$CONDITIONS " \
-m 1 \
--hive-table an_pafc_safe.${hive_table} \
--hive-drop-import-delims \
--fetch-size 5000 \
--hive-partition-key CANC_DATE \
--hive-partition-value ${IncStartAllFormat} \
--target-dir "${target_dir}" \
--hive-overwrite \
--null-string '\\N' \
--null-non-string '\\N' \
--hive-import;
exitCodeCheck $?
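Two notes on the import above: $CONDITIONS is a placeholder Sqoop requires in any free-form --query import (Sqoop substitutes its split predicate there at run time, even with -m 1), and --hive-partition-key/--hive-partition-value load the rows into the CANC_DATE='${IncStartAllFormat}' partition of the Hive table. A quick way to check the resulting partitions (a sketch, not part of the original script):
hive -e "use an_pafc_safe; show partitions ${hive_table};"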
Sqoop script (loading data into PostgreSQL)
#!/bin/sh
source ExitCodeCheck.sh
opts=$@
getparam(){
arg=$1
echo $opts |xargs -n1 |cut -b 2- |awk -F'=' '{if($1=="'"$arg"'") print $2}'
}
IncStart=`getparam inc_start`
IncEnd=`getparam inc_end`
pg_connection=`getparam jdbc_str`
pg_username=`getparam db_user`
pg_password=`getparam db_psw`
dataName=`getparam db_sid`
queueName=`getparam hdp_queue`
hdfshostname=`getparam hdfs_host`
IncStartYear=`echo ${IncStart:0:4}`;
IncStartMonth=`echo ${IncStart:4:2}`;
IncStartDay=`echo ${IncStart:6:2}`;
IncStartAll=${IncStartYear}"-"${IncStartMonth}"-"${IncStartDay}" 00:00:00.0";
IncStartAllFormat=${IncStartYear}"-"${IncStartMonth}"-"${IncStartDay};
IncEndYear=`echo ${IncEnd:0:4}`;
IncEndMonth=`echo ${IncEnd:4:2}`;
IncEndDay=`echo ${IncEnd:6:2}`;
IncEndAll=${IncEndYear}"-"${IncEndMonth}"-"${IncEndDay}" 00:00:00.0";
twoDayAgo=`date -d "$IncStart 2 days ago " +%Y%m%d `;
twoDayAgoYear=`echo ${twoDayAgo:0:4}`;
twoDayAgoMonth=`echo ${twoDayAgo:4:2}`;
twoDayAgoDay=`echo ${twoDayAgo:6:2}`;
twoDayAgoAll=${twoDayAgoYear}"-"${twoDayAgoMonth}"-"${twoDayAgoDay}" 00:00:00.0";
twoDayAgoAllFormat=${twoDayAgoYear}"-"${twoDayAgoMonth}"-"${twoDayAgoDay};
job_name=$0
# Target PostgreSQL table name for the export
export_table_name=n_par_life_hrm_org_excelude_camp;
# Temporary file name for the data exported to PostgreSQL
sqoop_export_data_filename=${export_table_name};
# PostgreSQL column list for the export
export_table_columns=statis_date,um_num,um_name,group_id,group_name,department_id,department_name,center_id,center_name
# Temporary HDFS directory for the data exported to PostgreSQL
sqoop_export_data_dir=/apps-data/hduser0101/an_pafc_safe/export/${sqoop_export_data_filename};
hadoop dfs -rmr ${sqoop_export_data_dir};
# Generate the temporary data to be exported to PostgreSQL
hive -v -e "set mapred.job.queue.name=${queueName};
set mapred.job.name=${job_name}_1;
use an_pafc_safe;
insert overwrite directory '${sqoop_export_data_dir}'
select
'${IncStartAllFormat}' as statis_date,
um_num,
um_name,
group_id,
group_name,
department_id,
department_name,
center_id,
center_name
from sx_360_safe.n_par_life_hrm_org_excelude_camp ;";
exitCodeCheck $?
# First delete the data from two days ago in the target database
sqoop eval -Dmapred.job.queue.name=${queueName} \
--connect ${pg_connection} \
--username ${pg_username} \
--password ${pg_password} \
--verbose \
--query "delete from ${export_table_name} A where a.statis_date=date('${twoDayAgoAllFormat}')";
exitCodeCheck $?
# Delete the data for the current run date (supports re-running the script)
sqoop eval -Dmapred.job.queue.name=${queueName} \
--connect ${pg_connection} \
--username ${pg_username} \
--password ${pg_password} \
--verbose \
--query "delete from ${export_table_name} A where a.statis_date=date('${IncStartAllFormat}')";
exitCodeCheck $?
# Then export the data
sqoop export -D mapred.job.name=${job_name}_2 -D sqoop.export.statements.per.transaction=4500 -D mapreduce.map.tasks=1 -D mapred.map.max.attempts=1 -D mapred.reduce.max.attempts=1 -D mapreduce.map.maxattempts=1 -D mapreduce.reduce.maxattempts=1 -D mapred.job.queue.name=${queueName} \
--connect ${pg_connection} \
--username ${pg_username} \
--password ${pg_password} \
--export-dir ${sqoop_export_data_dir} \
--verbose \
--num-mappers 1 \
--table ${export_table_name} \
--columns ${export_table_columns} \
--input-fields-terminated-by '\001' \
--input-lines-terminated-by '\n' \
--input-null-string '\\N' \
--input-null-non-string '\\N';
exitCodeCheck $?
Sqoop script (Oracle to Hive)
#!/bin/sh
source ExitCodeCheck.sh
opts=$@
getparam(){
arg=$1
echo $opts |xargs -n1 |cut -b 2- |awk -F'=' '{if($1=="'"$arg"'") print $2}'
}
IncStart=`getparam inc_start`
IncEnd=`getparam inc_end`
oracle_connection=`getparam jdbc_str`
oracle_username=`getparam db_user`
oracle_password=`getparam db_psw`
dataName=`getparam db_sid`
queueName=`getparam hdp_queue`
hdfshostname=`getparam hdfs_host`;
IncStartYear=`echo ${IncStart:0:4}`;
IncStartMonth=`echo ${IncStart:4:2}`;
IncStartDay=`echo ${IncStart:6:2}`;
IncStartAll=${IncStartYear}"-"${IncStartMonth}"-"${IncStartDay}" 00:00:00.0";
IncStartAllFormat=${IncStartYear}"-"${IncStartMonth}"-"${IncStartDay};
IncEndYear=`echo ${IncEnd:0:4}`;
IncEndMonth=`echo ${IncEnd:4:2}`;
IncEndDay=`echo ${IncEnd:6:2}`;
IncEndAll=${IncEndYear}"-"${IncEndMonth}"-"${IncEndDay}" 00:00:00.0";
IncEndAllFormat=${IncEndYear}"-"${IncEndMonth}"-"${IncEndDay};
OneDayAgo=`date -d "$IncStart 1 days ago " +%Y%m%d `;
OneDayAgoYear=`echo ${OneDayAgo:0:4}`;
OneDayAgoMonth=`echo ${OneDayAgo:4:2}`;
OneDayAgoDay=`echo ${OneDayAgo:6:2}`;
OneDayAgoAll=${OneDayAgoYear}"-"${OneDayAgoMonth}"-"${OneDayAgoDay}" 00:00:00.0";
OneDayAgoAllFormat=${OneDayAgoYear}"-"${OneDayAgoMonth}"-"${OneDayAgoDay};
# Job name is taken from the script name
job_name=$0
# Target Hive table name
hive_table=AGG_MOBILE_BUSSINESS_DEPT_ALL
# Temporary HDFS directory
target_dir=/apps-data/hduser0101/an_pafc_safe/import/${hive_table}
# Remove the temporary directory (supports re-running the script)
hadoop dfs -rmr ${target_dir}
sqoop import -D mapred.job.queue.name=${queueName} -D mapred.job.name=${job_name} \
--connect ${oracle_connection} \
--username ${oracle_username} \
--password ${oracle_password} \
--query "SELECT \
PARENT_CODE \
,DEPARTMENT_CODE \
,DEPARTMENT_NAME \
,DEPARTMENT_LEVEL \
,MEASURES_CODE \
,SUM_DATA_DAY \
,SUM_DATA_MONTH \
,SUM_DATA_YEAR \
,MONTH_COMPLE \
,YEAR_COMPLE \
,YEACC_COMPLE \
,MONTH_INC \
,YEAR_INC \
,DATA_LASTYEAR_D \
,DATA_LASTYEAR_M \
,DATA_LASTYEAR_Y \
,OPTION_ITEM \
,CREATED_BY \
,DATE_CREATED \
,UPDATED_BY \
,DATE_UPDATED \
,ORDER_NO \
,DAY_INC \
FROM AGG_MOBILE_BUSSINESS_DEPT_ALL \
WHERE CANC_DATE = to_date('${IncStart}','yyyymmdd') \
and \$CONDITIONS " \
-m 1 \
--hive-table an_pafc_safe.${hive_table} \
--hive-drop-import-delims \
--fetch-size 5000 \
--hive-partition-key CANC_DATE \
--hive-partition-value ${IncStartAllFormat} \
--target-dir "${target_dir}" \
--hive-overwrite \
--null-string '\\N' \
--null-non-string '\\N' \
--hive-import;
exitCodeCheck $?
Commands used when running the data transfers
1. Test environment: get the jar down from HDFS
cd /tmp/hduser0101
hadoop fs -get /apps/hduser0101/an_pafc_safe/lib/spark2phoenix.jar
2. Dev environment: put the jar up to HDFS
hadoop fs -put -f /appcom/tmp/spark2phoenix.jar /apps/hduser0101/an_pafc_safe/lib/spark2phoenix.jar
3. Enter Hive and use the corresponding data mart:
use an_pafc_safe; -- the database to use
set mapred.job.queue.name=root.queue_0101_01; -- the queue to use
set mapreduce.job.queuename=root.queue_0101_01;
# Run SQL statements with Spark:
spark-sql --master yarn-client --queue root.queue_0101_01
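spark-sql can also run a script file non-interactively with -f (a sketch; the .sql path is hypothetical):
spark-sql --master yarn-client --queue root.queue_0101_01 -f /appcom/tmp/example_query.sql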
Command to load a txt file into a Hive table:
load data local inpath '/appcom/tmp/zeb/sx_ela_bp_info.txt' into table an_pafc_safe.sx_ela_bp_info;
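Note that load data ... into table appends to the target table; to replace the existing contents instead, add overwrite (a sketch, same file and table as above):
hive -e "load data local inpath '/appcom/tmp/zeb/sx_ela_bp_info.txt' overwrite into table an_pafc_safe.sx_ela_bp_info;"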
# Parameters to pass when running the first script:
./hive_cgi_idld_lcdm_mit_client_group_info.sh -inc_start=20170701 -inc_end=20170801 -jdbc_str=jdbc:oracle:thin:@10.20.131.82:1526:srvp_d0mbi_paeye-mbi-life_1 -db_user=mbidata -db_psw=pa12ic34 -db_sid=LUSH0 -hdp_queue=root.queue_0101_01 -hdfs_host=hdfs://hdp-hdfs01
# Directory where the first script is stored:
/apps/hduser0101/an_pafc_safe/hive_cgi
# Directory where the second and subsequent scripts are stored:
/apps/hduser0101/sx_360_safe/spark_cgi
# Command to run the sqoop script:
./sqoop_import_liferpt_agg_mobile_bussiness_dept_all.sh -inc_start=20170206 -inc_end=20170207 -jdbc_str=jdbc:oracle:thin:@d0lifrt.dbdev.paic.com.cn:1526:d0lifrt -db_user=LOLAPDATA -db_psw=patst2012 -db_sid=LUSH0 -hdp_queue=root.queue_0101_01 -hdfs_host=hdfs://hdp-hdfs01
# Command to run the hive script:
./hive_mbi_epcis_life_plan.sh -hdp_queue=root.queue_0101_01 -hdfs_host=hdfs://hdp-hdfs01