#!/usr/bin/env bash
# Daily incremental extract driver.
# Default: pull yesterday's data; if a date argument is passed, pull that
# day's data instead.
# Usage: script.sh [date]   (any format accepted by `date -d`)
dt=$(date -d "1 days ago" +"%Y%m%d")
if [ $# -eq 1 ]; then
  dt=$(date -d "$1" +"%Y%m%d")
fi
echo "$dt"
# Target Hive database and source MySQL connection settings.
# NOTE(review): the password is hardcoded and rides on the sqoop command line,
# so it leaks via `ps` output — consider sqoop's --password-file instead.
# MYSQL_URL deliberately bundles the JDBC URL together with the
# --username/--password flags in ONE string; the import function expands it
# UNQUOTED so word splitting turns it into separate sqoop arguments.
HIVE_DB_NAME="bdpa"
MYSQL_HOST="10.202.12.12"
MYSQL_USER="ss"
MYSQL_PASSWORD="password"
MYSQL_DB="eee"
MYSQL_URL="jdbc:mysql://${MYSQL_HOST}:3306/${MYSQL_DB} --username ${MYSQL_USER} --password ${MYSQL_PASSWORD}"
# Import one day of a MySQL table into that table's Hive partition directory.
# Arguments:
#   $1 - table name (same name in MySQL and Hive)
#   $2 - partition day, yyyymmdd; rows are matched via DATE_FORMAT on $3
#   $3 - MySQL datetime column used as the incremental filter
# Reads globals: MYSQL_URL, HIVE_DB_NAME
# Side effects: writes /user/hive/warehouse/<db>.db/<table>/<day> on HDFS.
function sqoop_mysql_to_hdfs(){
    # 'local' keeps these from clobbering the caller's variables of the
    # same name (the original leaked them into the global scope).
    local TABLE_NAME="$1"
    local PARTITION_TIME="$2"
    local APPEND_FIELD="$3"
    #hadoop fs -rm -r /user/hive/warehouse/${HIVE_DB_NAME}.db/${TABLE_NAME}/${PARTITION_TIME}
    #hadoop fs -rm -r /user/hdfs/${TABLE_NAME}
    # ${MYSQL_URL} is intentionally UNQUOTED: it carries the JDBC URL plus
    # the --username/--password flags and relies on word splitting.
    sqoop import -m 1 \
    --connect ${MYSQL_URL} \
    --query "select * from ${TABLE_NAME} where DATE_FORMAT(${APPEND_FIELD},'%Y%m%d') = '${PARTITION_TIME}' and \$CONDITIONS" \
    --target-dir "/user/hive/warehouse/${HIVE_DB_NAME}.db/${TABLE_NAME}/${PARTITION_TIME}" \
    --fields-terminated-by '\002' \
    --hive-drop-import-delims \
    --null-string '\\N' \
    --null-non-string '\\N'
}
# Re-sync one day's partition for a table: drop the Hive partition,
# re-import that day from MySQL, then re-register the partition location.
# Arguments:
#   $1 - table name
#   $2 - MySQL datetime column used as the incremental filter
# Reads globals: dt (yyyymmdd day to sync), HIVE_DB_NAME
function sqoop_yesterday_append(){
    local TABLE_NAME="$1"
    local APPEND_FIELD="$2"
    # 'if exists' keeps the first-ever run from failing when the partition
    # does not exist yet. (Also removed a leftover debug `echo "aa"`.)
    hive -e "alter table ${HIVE_DB_NAME}.${TABLE_NAME} drop if exists partition(inc_day='${dt}');"
    sqoop_mysql_to_hdfs "${TABLE_NAME}" "${dt}" "${APPEND_FIELD}"
    hive -e "alter table ${HIVE_DB_NAME}.${TABLE_NAME} add partition(inc_day='${dt}') location '/user/hive/warehouse/${HIVE_DB_NAME}.db/${TABLE_NAME}/${dt}';"
}
# Kick off the daily partition re-sync for each incremental table.
printf '%s\n' "begin sqoop invoice_path"
sqoop_yesterday_append invoice_path create_tm
printf '%s\n' "begin sqoop invoice_result"
sqoop_yesterday_append invoice_result create_tm
# NOTE(review): removed an accidental copy-paste duplicate of the entire
# script (a second identical dt/config setup, identical redefinitions of
# sqoop_mysql_to_hdfs and sqoop_yesterday_append, and a second round of the
# same invoice_path/invoice_result imports). The duplicated import pass was
# broken anyway: sqoop refuses to write to an already-existing --target-dir
# created by the first pass.