Ambari Uninstallation

1. Stop all components in the cluster through Ambari. If a component will not stop, kill it directly with kill -9 XXX.
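
If a process refuses to stop, a rough sketch for finding leftover processes and killing them by PID (the grep pattern is only an example; adjust it to the services actually installed):

# List processes that still belong to cluster components
ps -ef | grep -iE 'ambari|hadoop|hbase|hive|zookeeper' | grep -v grep
# Then kill the stubborn ones by PID
kill -9 <PID>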

2. Run on all nodes:

# The location may differ between Ambari versions
python /usr/lib/python2.6/site-packages/ambari_agent/HostCleanup.py --silent --skip=users
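
If the script is not at that path (it moves between Ambari versions), it can be located first, for example:

# Locate HostCleanup.py before running it
find /usr/lib -name HostCleanup.py 2>/dev/null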

3. Remove the Hadoop packages on all nodes:

yum remove -y hadoop_2* hdp-select* ranger* zookeeper* bigtop* atlas-metadata* spark* slider* storm* hive* ambari* oozie* pig* tez* hbase* knox* accumulo* falcon* smartsense-hst

4. The command above may not cover every package. After it finishes, run the following check and remove anything that is still installed:

rpm -qa | grep -i ambari
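
Anything this check still reports can be removed in one pass; a sketch (verify the package list before removing):

# Remove every remaining package whose name matches ambari (case-insensitive)
rpm -qa | grep -i ambari | xargs -r yum remove -y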

5. Stop ambari-server and ambari-agent:

ambari-server stop
ambari-agent stop

6. Uninstall ambari-server and ambari-agent (all nodes):

yum erase ambari-server -y
yum erase ambari-agent -y

7. Remove the repository files:

rm -rf /etc/yum.repos.d/ambari.repo /etc/yum.repos.d/HDP*

yum clean all

8. Delete the database data (embedded PostgreSQL):

rm -rf /var/lib/pgsql

9. Delete the log directories:

sudo rm -rf /var/log/ambari-agent
sudo rm -rf /var/log/ambari-metrics-grafana
sudo rm -rf /var/log/ambari-metrics-monitor
sudo rm -rf /var/log/ambari-server/
sudo rm -rf /var/log/falcon
sudo rm -rf /var/log/flume
sudo rm -rf /var/log/hadoop
sudo rm -rf /var/log/hadoop-mapreduce
sudo rm -rf /var/log/hadoop-yarn
sudo rm -rf /var/log/hive
sudo rm -rf /var/log/hive-hcatalog
sudo rm -rf /var/log/hive2
sudo rm -rf /var/log/hst
sudo rm -rf /var/log/knox
sudo rm -rf /var/log/oozie
sudo rm -rf /var/log/solr
sudo rm -rf /var/log/zookeeper

10. Delete the Hadoop directories:

sudo rm -rf /hadoop/*
sudo rm -rf /hdfs/hadoop
sudo rm -rf /hdfs/lost+found
sudo rm -rf /hdfs/var
sudo rm -rf /local/opt/hadoop
sudo rm -rf /tmp/hadoop
sudo rm -rf /usr/bin/hadoop
sudo rm -rf /usr/hdp
sudo rm -rf /var/hadoop

11. Delete the configuration directories:

sudo rm -rf /etc/ambari-agent
sudo rm -rf /etc/ambari-metrics-grafana
sudo rm -rf /etc/ambari-server
sudo rm -rf /etc/ams-hbase
sudo rm -rf /etc/falcon
sudo rm -rf /etc/flume
sudo rm -rf /etc/hadoop
sudo rm -rf /etc/hadoop-httpfs
sudo rm -rf /etc/hbase
sudo rm -rf /etc/hive
sudo rm -rf /etc/hive-hcatalog
sudo rm -rf /etc/hive-webhcat
sudo rm -rf /etc/hive2
sudo rm -rf /etc/hst
sudo rm -rf /etc/knox
sudo rm -rf /etc/livy
sudo rm -rf /etc/mahout
sudo rm -rf /etc/oozie
sudo rm -rf /etc/phoenix
sudo rm -rf /etc/pig
sudo rm -rf /etc/ranger-admin
sudo rm -rf /etc/ranger-usersync
sudo rm -rf /etc/spark2
sudo rm -rf /etc/tez
sudo rm -rf /etc/tez_hive2
sudo rm -rf /etc/zookeeper

12. Delete the PID directories:

sudo rm -rf /var/run/ambari-agent
sudo rm -rf /var/run/ambari-metrics-grafana
sudo rm -rf /var/run/ambari-server
sudo rm -rf /var/run/falcon
sudo rm -rf /var/run/flume
sudo rm -rf /var/run/hadoop
sudo rm -rf /var/run/hadoop-mapreduce
sudo rm -rf /var/run/hadoop-yarn
sudo rm -rf /var/run/hbase
sudo rm -rf /var/run/hive
sudo rm -rf /var/run/hive-hcatalog
sudo rm -rf /var/run/hive2
sudo rm -rf /var/run/hst
sudo rm -rf /var/run/knox
sudo rm -rf /var/run/oozie
sudo rm -rf /var/run/webhcat
sudo rm -rf /var/run/zookeeper

13. Delete the library directories:

sudo rm -rf /usr/lib/ambari-agent
sudo rm -rf /usr/lib/ambari-infra-solr-client
sudo rm -rf /usr/lib/ambari-metrics-hadoop-sink
sudo rm -rf /usr/lib/ambari-metrics-kafka-sink
sudo rm -rf /usr/lib/ambari-server-backups
sudo rm -rf /usr/lib/ams-hbase
sudo rm -rf /usr/lib/mysql
sudo rm -rf /var/lib/ambari-agent
sudo rm -rf /var/lib/ambari-metrics-grafana
sudo rm -rf /var/lib/ambari-server
sudo rm -rf /var/lib/flume
sudo rm -rf /var/lib/hadoop-hdfs
sudo rm -rf /var/lib/hadoop-mapreduce
sudo rm -rf /var/lib/hadoop-yarn
sudo rm -rf /var/lib/hive2
sudo rm -rf /var/lib/knox
sudo rm -rf /var/lib/smartsense
sudo rm -rf /var/lib/storm

14. Delete the temporary files:

sudo rm -rf /var/tmp/*

15. Delete leftover symlinks

# In particular, check /usr/sbin and /usr/lib/python2.6/site-packages
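
A sketch for finding the dangling symlinks in those directories (review the output before deleting anything):

# List broken symlinks left behind in the usual locations
find /usr/sbin /usr/lib/python2.6/site-packages -xtype l 2>/dev/null
# Delete them once the list has been reviewed
find /usr/sbin /usr/lib/python2.6/site-packages -xtype l -delete 2>/dev/null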


16. Delete the service users and their home directories (a loop alternative is sketched after the userdel list):

sudo userdel oozie
sudo userdel hive
sudo userdel ambari-qa
sudo userdel flume 
sudo userdel hdfs 
sudo userdel knox 
sudo userdel storm 
sudo userdel mapred
sudo userdel hbase 
sudo userdel tez 
sudo userdel zookeeper
sudo userdel kafka 
sudo userdel falcon
sudo userdel sqoop 
sudo userdel yarn 
sudo userdel hcat
sudo userdel atlas
sudo userdel spark
sudo userdel ams
sudo userdel zeppelin
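
The same can be done in a loop; userdel -r also removes each user's home directory, which covers most of the /home cleanup below (a sketch, safe only if these are dedicated service accounts):

# Remove each service user together with its home directory
for u in oozie hive ambari-qa flume hdfs knox storm mapred hbase tez zookeeper kafka falcon sqoop yarn hcat atlas spark ams zeppelin; do
  sudo userdel -r "$u" 2>/dev/null
done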
 
sudo rm -rf /home/atlas
sudo rm -rf /home/accumulo
sudo rm -rf /home/hbase
sudo rm -rf /home/hive
sudo rm -rf /home/oozie
sudo rm -rf /home/storm
sudo rm -rf /home/yarn
sudo rm -rf /home/ambari-qa
sudo rm -rf /home/falcon
sudo rm -rf /home/hcat
sudo rm -rf /home/kafka
sudo rm -rf /home/mahout
sudo rm -rf /home/spark
sudo rm -rf /home/tez
sudo rm -rf /home/zookeeper
sudo rm -rf /home/flume
sudo rm -rf /home/hdfs
sudo rm -rf /home/knox
sudo rm -rf /home/mapred
sudo rm -rf /home/sqoop
 
sudo rm -rf /var/lib/ambari*
sudo rm -rf /usr/lib/python2.6/site-packages/ambari_*
sudo rm -rf /usr/lib/python2.6/site-packages/resource_management
sudo rm -rf /usr/lib/ambari-*
 
sudo rm -rf /etc/ambari-*
sudo rm -rf /etc/hadoop
sudo rm -rf /etc/hbase
sudo rm -rf /etc/hive
sudo rm -rf /etc/hive2 
sudo rm -rf /etc/oozie
sudo rm -rf /etc/sqoop 
sudo rm -rf /etc/zookeeper
sudo rm -rf /etc/flume 
sudo rm -rf /etc/storm 
sudo rm -rf /etc/tez_hive2 
sudo rm -rf /etc/spark2 
sudo rm -rf /etc/phoenix 
sudo rm -rf /etc/pig 
sudo rm -rf /etc/hive-hcatalog
sudo rm -rf /etc/tez 
sudo rm -rf /etc/falcon 
sudo rm -rf /etc/knox 
sudo rm -rf /etc/hive-webhcat
sudo rm -rf /etc/kafka 
sudo rm -rf /etc/slider 
sudo rm -rf /etc/storm-slider-client
sudo rm -rf /etc/spark 
 
sudo rm -rf /var/run/spark
sudo rm -rf /var/run/hadoop
sudo rm -rf /var/run/hbase
sudo rm -rf /var/run/zookeeper
sudo rm -rf /var/run/flume
sudo rm -rf /var/run/storm
sudo rm -rf /var/run/webhcat
sudo rm -rf /var/run/hadoop-yarn
sudo rm -rf /var/run/hadoop-mapreduce
sudo rm -rf /var/run/kafka
sudo rm -rf /var/run/hive	
sudo rm -rf /var/run/oozie	
sudo rm -rf /var/run/sqoop	
sudo rm -rf /var/run/hive-hcatalog	
sudo rm -rf /var/run/falcon	
sudo rm -rf /var/run/hadoop-hdfs	
sudo rm -rf /var/run/ambari-metrics-collector
sudo rm -rf /var/run/ambari-metrics-monitor	
sudo rm -rf /var/log/hadoop-hdfs	
sudo rm -rf /var/log/hive-hcatalog
sudo rm -rf /var/log/ambari-metrics-monitor
sudo rm -rf /var/log/hadoop
sudo rm -rf /var/log/hbase
sudo rm -rf /var/log/flume
sudo rm -rf /var/log/sqoop
sudo rm -rf /var/log/ambari-server
sudo rm -rf /var/log/ambari-agent
sudo rm -rf /var/log/storm
sudo rm -rf /var/log/hadoop-yarn
sudo rm -rf /var/log/hadoop-mapreduce
sudo rm -rf /var/log/knox 
sudo rm -rf /var/lib/slider
 
sudo rm -rf /usr/lib/flume
sudo rm -rf /usr/lib/storm
sudo rm -rf /var/lib/hive 
sudo rm -rf /var/lib/oozie
sudo rm -rf /var/lib/flume
sudo rm -rf /var/lib/hadoop-yarn
sudo rm -rf /var/lib/hadoop-mapreduce
sudo rm -rf /var/lib/hadoop-hdfs
sudo rm -rf /var/lib/zookeeper
sudo rm -rf /var/lib/knox 
sudo rm -rf /var/log/hive 
sudo rm -rf /var/log/oozie
sudo rm -rf /var/log/zookeeper
sudo rm -rf /var/log/falcon
sudo rm -rf /var/log/webhcat
sudo rm -rf /var/log/spark
sudo rm -rf /var/tmp/oozie
sudo rm -rf /tmp/ambari-qa
sudo rm -rf /tmp/hive 
sudo rm -rf /var/hadoop
sudo rm -rf /hadoop/falcon
sudo rm -rf /tmp/hadoop 
sudo rm -rf /tmp/hadoop-hdfs
sudo rm -rf /usr/hdp
sudo rm -rf /usr/hadoop
sudo rm -rf /opt/hadoop
sudo rm -rf /tmp/hadoop
sudo rm -rf /var/hadoop
sudo rm -rf /hadoop
 
sudo rm -rf /usr/bin/worker-lanucher
sudo rm -rf /usr/bin/zookeeper-client
sudo rm -rf /usr/bin/zookeeper-server
sudo rm -rf /usr/bin/zookeeper-server-cleanup
sudo rm -rf /usr/bin/yarn 
sudo rm -rf /usr/bin/storm
sudo rm -rf /usr/bin/storm-slider 
sudo rm -rf /usr/bin/sqoop 
sudo rm -rf /usr/bin/sqoop-codegen 
sudo rm -rf /usr/bin/sqoop-create-hive-table 
sudo rm -rf /usr/bin/sqoop-eval 
sudo rm -rf /usr/bin/sqoop-export 
sudo rm -rf /usr/bin/sqoop-help 
sudo rm -rf /usr/bin/sqoop-import 
sudo rm -rf /usr/bin/sqoop-import-all-tables 
sudo rm -rf /usr/bin/sqoop-job 
sudo rm -rf /usr/bin/sqoop-list-databases 
sudo rm -rf /usr/bin/sqoop-list-tables 
sudo rm -rf /usr/bin/sqoop-merge 
sudo rm -rf /usr/bin/sqoop-metastore 
sudo rm -rf /usr/bin/sqoop-version 
sudo rm -rf /usr/bin/slider 
sudo rm -rf /usr/bin/ranger-admin-start 
sudo rm -rf /usr/bin/ranger-admin-stop 
sudo rm -rf /usr/bin/ranger-kms
sudo rm -rf /usr/bin/ranger-usersync-start
sudo rm -rf /usr/bin/ranger-usersync-stop
sudo rm -rf /usr/bin/pig 
sudo rm -rf /usr/bin/phoenix-psql 
sudo rm -rf /usr/bin/phoenix-queryserver 
sudo rm -rf /usr/bin/phoenix-sqlline 
sudo rm -rf /usr/bin/phoenix-sqlline-thin 
sudo rm -rf /usr/bin/oozie 
sudo rm -rf /usr/bin/oozied.sh 
sudo rm -rf /usr/bin/mapred 
sudo rm -rf /usr/bin/mahout 
sudo rm -rf /usr/bin/kafka 
sudo rm -rf /usr/bin/hive 
sudo rm -rf /usr/bin/hiveserver2 
sudo rm -rf /usr/bin/hbase
sudo rm -rf /usr/bin/hcat 
sudo rm -rf /usr/bin/hdfs 
sudo rm -rf /usr/bin/hadoop 
sudo rm -rf /usr/bin/flume-ng 
sudo rm -rf /usr/bin/falcon 
sudo rm -rf /usr/bin/beeline
sudo rm -rf /usr/bin/atlas-start 
sudo rm -rf /usr/bin/atlas-stop 
sudo rm -rf /usr/bin/accumulo

17. On all nodes, run find / -name <component> to check for anything left behind:

find / -name ambari
find / -name accumulo
find / -name atlas
find / -name beeline
find / -name falcon
find / -name flume
find / -name hadoop
find / -name hbase
find / -name hcat
find / -name hdfs
find / -name hdp
find / -name hive
find / -name hiveserver2
find / -name kafka
find / -name mahout
find / -name mapred
find / -name oozie
find / -name phoenix
find / -name pig
find / -name ranger
find / -name slider
find / -name sqoop
find / -name storm
find / -name yarn
find / -name zookeeper
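
The same checks can be run as a loop; a sketch:

# Search the whole filesystem for leftovers of each component
for name in ambari accumulo atlas beeline falcon flume hadoop hbase hcat hdfs hdp hive hiveserver2 kafka mahout mapred oozie phoenix pig ranger slider sqoop storm yarn zookeeper; do
  find / -name "$name" 2>/dev/null
done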

18. Drop the Ambari database from MySQL (if an external MySQL database was used):

drop database ambari;
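
For example, from the shell (user and password are whatever was configured for the MySQL server that backed Ambari):

# Drop the Ambari schema on the MySQL server
mysql -u root -p -e "DROP DATABASE ambari;"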