#!/bin/bash
# Program:
# Automatically uninstall Ambari and HDP from every host in the cluster
PATH=/bin:/sbin:/usr/bin:/usr/sbin:/usr/local/bin:/usr/local/sbin:~/bin
export PATH

# Collect all cluster hostnames. Note: this assumes each /etc/hosts entry separates the IP and the
# hostname with a tab (cut's default delimiter); for other separators see the commented-out line below.
# Adjust "tail -n +3" so it skips however many localhost/loopback lines your /etc/hosts has.
#hostList=$(cat /etc/hosts | tail -n +3 | cut -d ' ' -f 2)
hostList=$(cat /etc/hosts | tail -n +3 | cut -f 2)
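# Alternative (an assumption, not part of the original script): awk splits on any run of whitespace,
# so the line below works whether /etc/hosts separates fields with tabs or spaces.
#hostList=$(tail -n +3 /etc/hosts | awk '{print $2}')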
yumReposDir=/etc/yum.repos.d/
alterNativesDir=/etc/alternatives/
pingCount=5   # number of ping probes per host for the reachability check (adjust as needed)
logPre=HDP

read -p "Please input your master hostname: " master
master=${master:-"master"}
ssh $master "ambari-server stop"
# Reset the Ambari database
ssh $master "ambari-server reset"

for host in $hostList
do
echo $host
# Check that the host is reachable
unPing=$(ping $host -c $pingCount | grep 'Unreachable' | wc -l)
if [ "$unPing" == "$pingCount" ]; then
echo -e "$logPre======>$host is unreachable, please check the '/etc/hosts' file"
continue
fi

echo -e "$logPre======>$host deleting...\n"
# Remove hdp.repo, HDP.repo, HDP-UTILS.repo and ambari.repo
ssh $host "rm -rf $yumReposDir/hdp.repo"
ssh $host "rm -rf $yumReposDir/HDP*"
ssh $host "rm -rf $yumReposDir/ambari.repo"
ssh $host "python /usr/lib/ambari-agent/lib/ambari_agent/HostCleanup.py --silent" # 删除HDP相关的安装包
hdppackagelist=$(yum list installed | grep HDP | cut -d ' ' -f )
for package in $hdppackagelist
do
echo "uninstalling $package"
ssh $host "yum remove -y $package"
done
# Remove Ambari-related packages
ambaripackagelist=$(ssh $host "yum list installed | grep ambari" | cut -d ' ' -f 1)
for package in $ambaripackagelist
do
echo "uninstalling $package"
ssh $host "yum remove -y $package"
done
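# Optional check (an assumption, not in the original script): verify that no HDP or Ambari
# packages remain installed on the host before continuing.
# ssh $host "yum list installed | grep -iE 'hdp|ambari'"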
ssh $host "cd $alterNativesDir"
ssh $host "rm -rf hadoop-etc"
ssh $host "rm -rf zookeeper-conf"
ssh $host "rm -rf hbase-conf"
ssh $host "rm -rf hadoop-log"
ssh $host "rm -rf hadoop-lib"
ssh $host "rm -rf hadoop-default"
ssh $host "rm -rf oozie-conf"
ssh $host "rm -rf hcatalog-conf"
ssh $host "rm -rf hive-conf"
ssh $host "rm -rf hadoop-man"
ssh $host "rm -rf sqoop-conf"
ssh $host "rm -rf hadoop-confone" # 删除日志文件
ssh $host "rm -rf /var/log/ambari*"
ssh $host "rm -rf /var/log/hadoop*"
ssh $host "rm -rf /var/log/hbase"
ssh $host "rm -rf /var/log/hive"
ssh $host "rm -rf /var/log/nagios"
ssh $host "rm -rf /var/log/oozie"
ssh $host "rm -rf /var/log/zookeeper"
ssh $host "rm -rf /var/log/falcon"
ssh $host "rm -rf /var/log/flume"
ssh $host "rm -rf /var/log/hive*"
ssh $host "rm -rf /var/log/knox"
ssh $host "rm -rf /var/log/solr"
ssh $host "rm -rf /var/log/hst" # 删除hadoop文件夹,包括HDFS数据
ssh $host "rm -rf /hadoop"
ssh $host "rm -rf /hdfs/hadoop"
ssh $host "rm -rf /hdfs/lost+found"
ssh $host "rm -rf /hdfs/var"
ssh $host "rm -rf /local/opt/hadoop"
ssh $host "rm -rf /tmp/hadoop"
ssh $host "rm -rf /usr/bin/hadoop"
ssh $host "rm -rf /usr/hdp"
ssh $host "rm -rf /var/hadoop" # 删除所有节点上的配置文件夹
ssh $host "rm -rf /etc/ambari-agent"
ssh $host "rm -rf /etc/ambari-metrics-grafana"
ssh $host "rm -rf /etc/ambari-server"
ssh $host "rm -rf /etc/ams-hbase"
ssh $host "rm -rf /etc/falcon"
ssh $host "rm -rf /etc/flume"
ssh $host "rm -rf /etc/hadoop"
ssh $host "rm -rf /etc/hadoop-httpfs"
ssh $host "rm -rf /etc/hbase"
ssh $host "rm -rf /etc/hive"
ssh $host "rm -rf /etc/hive-hcatalog"
ssh $host "rm -rf /etc/hive-webhcat"
ssh $host "rm -rf /etc/hive2"
ssh $host "rm -rf /etc/hst"
ssh $host "rm -rf /etc/knox"
ssh $host "rm -rf /etc/livy"
ssh $host "rm -rf /etc/mahout"
ssh $host "rm -rf /etc/oozie"
ssh $host "rm -rf /etc/phoenix"
ssh $host "rm -rf /etc/pig"
ssh $host "rm -rf /etc/ranger-admin"
ssh $host "rm -rf /etc/ranger-usersync"
ssh $host "rm -rf /etc/spark2"
ssh $host "rm -rf /etc/tez"
ssh $host "rm -rf /etc/tez_hive2"
ssh $host "rm -rf /etc/zookeeper" # 删除所有节点上的PID
ssh $host "rm -rf /var/run/ambari-agent"
ssh $host "rm -rf /var/run/ambari-metrics-grafana"
ssh $host "rm -rf /var/run/ambari-server"
ssh $host "rm -rf /var/run/falcon"
ssh $host "rm -rf /var/run/flume"
ssh $host "rm -rf /var/run/hadoop"
ssh $host "rm -rf /var/run/hadoop-mapreduce"
ssh $host "rm -rf /var/run/hadoop-yarn"
ssh $host "rm -rf /var/run/hbase"
ssh $host "rm -rf /var/run/hive"
ssh $host "rm -rf /var/run/hive-hcatalog"
ssh $host "rm -rf /var/run/hive2"
ssh $host "rm -rf /var/run/hst"
ssh $host "rm -rf /var/run/knox"
ssh $host "rm -rf /var/run/oozie"
ssh $host "rm -rf /var/run/webhcat"
ssh $host "rm -rf /var/run/zookeeper" # 删除所有节点上的库文件
ssh $host "rm -rf /usr/lib/ambari-agent"
ssh $host "rm -rf /usr/lib/ambari-infra-solr-client"
ssh $host "rm -rf /usr/lib/ambari-metrics-hadoop-sink"
ssh $host "rm -rf /usr/lib/ambari-metrics-kafka-sink"
ssh $host "rm -rf /usr/lib/ambari-server-backups"
ssh $host "rm -rf /var/lib/ambari-agent"
ssh $host "rm -rf /var/lib/ambari-metrics-grafana"
ssh $host "rm -rf /var/lib/ambari-server"
ssh $host "rm -rf /usr/lib/ams-hbase"
ssh $host "rm -rf /var/lib/flume"
ssh $host "rm -rf /var/lib/hadoop-hdfs"
ssh $host "rm -rf /var/lib/hadoop-mapreduce"
ssh $host "rm -rf /var/lib/hadoop-yarn"
ssh $host "rm -rf /var/lib/hive"
ssh $host "rm -rf /var/lib/hive2"
ssh $host "rm -rf /var/lib/knox"
ssh $host "rm -rf /var/lib/smartsense"
ssh $host "rm -rf /var/lib/storm"
ssh $host "rm -rf /usr/lib/hadoop"
ssh $host "rm -rf /usr/lib/hbase"
ssh $host "rm -rf /usr/lib/hcatalog"
ssh $host "rm -rf /usr/lib/oozie"
ssh $host "rm -rf /usr/lib/zookeeper"
ssh $host "rm -rf /var/lib/ganglia"
ssh $host "rm -rf /var/lib/oozie"
ssh $host "rm -rf /var/lib/zookeeper"
ssh $host "rm -rf /var/tmp/oozie"
ssh $host "rm -rf /var/nagios" # 删除临时文件
echo "删除临时文件..."
ssh $host "rm -rf /tmp/*" # 删除所有节点上的符号链接
ssh $host "rm -rf accumulo"
ssh $host "rm -rf atlas-start"
ssh $host "rm -rf atlas-stop"
ssh $host "rm -rf beeline"
ssh $host "rm -rf falcon"
ssh $host "rm -rf flume-ng"
ssh $host "rm -rf hbase"
ssh $host "rm -rf hcat"
ssh $host "rm -rf hdfs"
ssh $host "rm -rf hive"
ssh $host "rm -rf hiveserver2"
ssh $host "rm -rf kafka"
ssh $host "rm -rf mahout"
ssh $host "rm -rf /usr/bin/mapred"
ssh $host "rm -rf /usr/bin/oozie"
ssh $host "rm -rf /usr/bin/oozied.sh"
ssh $host "rm -rf /usr/bin/phoenix-psql"
ssh $host "rm -rf /usr/bin/phoenix-queryserver"
ssh $host "rm -rf /usr/bin/phoenix-sqlline"
ssh $host "rm -rf /usr/bin/phoenix-sqlline-thin"
ssh $host "rm -rf /usr/bin/pig"
ssh $host "rm -rf /usr/bin/python-wrap"
ssh $host "rm -rf /usr/bin/ranger-admin"
ssh $host "rm -rf /usr/bin/ranger-admin-start"
ssh $host "rm -rf /usr/bin/ranger-admin-stop"
ssh $host "rm -rf /usr/bin/ranger-kms"
ssh $host "rm -rf /usr/bin/ranger-usersync"
ssh $host "rm -rf /usr/bin/ranger-usersync-start"
ssh $host "rm -rf /usr/bin/ranger-usersync-stop"
ssh $host "rm -rf /usr/bin/slider"
ssh $host "rm -rf /usr/bin/sqoop"
ssh $host "rm -rf /usr/bin/sqoop-codegen"
ssh $host "rm -rf /usr/bin/sqoop-create-hive-table"
ssh $host "rm -rf /usr/bin/sqoop-eval"
ssh $host "rm -rf /usr/bin/sqoop-export"
ssh $host "rm -rf /usr/bin/sqoop-help"
ssh $host "rm -rf /usr/bin/sqoop-import"
ssh $host "rm -rf /usr/bin/sqoop-import-all-tables"
ssh $host "rm -rf /usr/bin/sqoop-job"
ssh $host "rm -rf /usr/bin/sqoop-list-databases"
ssh $host "rm -rf /usr/bin/sqoop-list-tables"
ssh $host "rm -rf /usr/bin/sqoop-merge"
ssh $host "rm -rf /usr/bin/sqoop-metastore"
ssh $host "rm -rf /usr/bin/sqoop-version"
ssh $host "rm -rf /usr/bin/storm"
ssh $host "rm -rf /usr/bin/storm-slider"
ssh $host "rm -rf /usr/bin/worker-lanucher"
ssh $host "rm -rf /usr/bin/yarn"
ssh $host "rm -rf /usr/bin/zookeeper-client"
ssh $host "rm -rf /usr/bin/zookeeper-server"
ssh $host "rm -rf /usr/bin/zookeeper-server-cleanup" # 删除数据库
ssh $host "yum remove -y postgresql"
ssh $host "rm -rf /var/lib/pgsql"
# Remove Ambari-related packages (left commented out)
# ssh $host "yum remove -y ambari-*"
# ssh $host "rm -rf /var/lib/ambari*" echo "$logPre======>$host is done! \n"
done
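A minimal way to run the script (a sketch under assumptions: the file name uninstall-ambari.sh is arbitrary, the node it runs on has passwordless SSH as root to every host listed in /etc/hosts, and you accept that it deletes data such as HDFS contents and everything under /tmp):

chmod +x uninstall-ambari.sh
./uninstall-ambari.sh
# When prompted, enter the hostname of the Ambari server node (defaults to "master").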
