hive依赖hadoop环境。
第一步,搭建hadoop环境
解压,拷贝到安装目录:注意自己的目录
# Unpack the Hadoop distribution tarball.
tar -zxvf hadoop-2.7.7.tar.gz
# Rename to a version-less directory so later config paths stay stable.
mv hadoop-2.7.7 hadoop
# Copy into the install prefix. NOTE: the correct path is /usr/local/soft
# (the original text had /user/local/soft, which contradicts every later
# reference such as HADOOP_PREFIX=/usr/local/soft/hadoop).
cp -R hadoop /usr/local/soft
配置hadoop环境变量
# JDK locations; adjust to your own install paths.
JAVA_HOME=/usr/jdk
JRE_HOME=/usr/jdk/jre
CLASSPATH=.:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar:$JRE_HOME/lib
PATH=$PATH:$JAVA_HOME/bin:$JRE_HOME/bin
# Export JRE_HOME as well — it was assigned above but never exported,
# so child processes (e.g. Hadoop daemons) would not see it.
export PATH JAVA_HOME JRE_HOME CLASSPATH
# Hadoop install prefix; all component homes point at the same tree.
export HADOOP_PREFIX=/usr/local/soft/hadoop
export HADOOP_HOME=$HADOOP_PREFIX
export HADOOP_MAPRED_HOME=$HADOOP_PREFIX
export HADOOP_COMMON_HOME=$HADOOP_PREFIX
export HADOOP_HDFS_HOME=$HADOOP_PREFIX
export YARN_HOME=$HADOOP_PREFIX
# Fixed typo: the original read $HADOO_PREFIX (missing P), which expands
# to an empty string and yields the bogus path "/lib/native".
export HADOOP_COMMON_LIB_NATIVE_DIR=$HADOOP_PREFIX/lib/native
export HADOOP_INSTALL=$HADOOP_PREFIX
export PATH=$PATH:$HADOOP_PREFIX/bin:$HADOOP_PREFIX/sbin
安装并配置 SSH 免密登录(Hadoop 各守护进程间需要无密码互访)
# Install the SSH server (CentOS/RHEL).
yum install -y openssh-server
# Generate an RSA key pair; just press Enter through every prompt.
ssh-keygen -t rsa
# Authorize our own public key — without this step "passwordless" SSH
# still prompts for a password (the original text omitted it).
cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys
# sshd refuses authorized_keys that are group/world writable.
chmod 600 ~/.ssh/authorized_keys
# Verify: this should log in without asking for a password.
ssh localhost
hadoop启动配置
配置 hadoop-env.sh 中的 JDK 路径(注意:文件名是 hadoop-env.sh,不是 hadoop_env.sh)
# The java implementation to use.
# Set inside etc/hadoop/hadoop-env.sh — Hadoop daemons do not inherit the
# shell's JAVA_HOME, so it must be hard-coded here; adjust to your JDK path.
export JAVA_HOME=/usr/jdk
配置 core-site.xml
# Enter the Hadoop configuration directory.
cd /usr/local/soft/hadoop/etc/hadoop/
# Open core-site.xml in an editor and add the <property> entries below.
vi core-site.xml
<configuration>
<!-- 指定HDFS老大(namenode)的通信地址 -->
<property>
<name>fs.defaultFS</name>
<value>hdfs://127.0.0.1:9000</value>
</property>
<!-- 指定hadoop运行时产生文件的存储路径 -->
<property>