- Download Hive
wget http://mirror.bit.edu.cn/apache/hive/hive-3.1.2/apache-hive-3.1.2-bin.tar.gz
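If the mirror above is unavailable (Apache mirrors rotate old releases out), the permanent Apache archive keeps the same file:
wget https://archive.apache.org/dist/hive/hive-3.1.2/apache-hive-3.1.2-bin.tar.gz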
- Install
tar -zxvf apache-hive-3.1.2-bin.tar.gz -C /root/apps  # extract
mv /root/apps/apache-hive-3.1.2-bin /root/apps/hive-3.1.2  # rename
- Configure the environment (e.g. append to /etc/profile)
export HIVE_HOME=/root/apps/hive-3.1.2
export PATH=$PATH:$JAVA_HOME/bin:$HADOOP_HOME/bin:$HADOOP_HOME/sbin:$HIVE_HOME/bin
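After appending the lines above (assuming they went into /etc/profile), reload the file so the current shell picks them up, then sanity-check:
source /etc/profile
hive --version   # should print the Hive 3.1.2 version string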
- Download mysql-connector-java-5.1.39.jar and add it to /root/apps/hive-3.1.2/lib
wget https://gitee.com/chenshiba/code/blob/master/mysql-connector-java-5.1.39.jar
(Note: Gitee's /blob/ URL serves an HTML page; swap /blob/ for /raw/ in the URL to download the jar file itself.)
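Then place the driver in Hive's lib directory and confirm it landed:
cp mysql-connector-java-5.1.39.jar /root/apps/hive-3.1.2/lib/
ls /root/apps/hive-3.1.2/lib/ | grep mysql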
- Edit hive-env.sh
cd /root/apps/hive-3.1.2/conf
mv hive-env.sh.template hive-env.sh
vim hive-env.sh
Append the following:
HADOOP_HOME=/root/apps/hadoop-3.1.2                   # Hadoop installation directory
export HIVE_CONF_DIR=/root/apps/hive-3.1.2/conf       # Hive configuration directory
export HIVE_AUX_JARS_PATH=/root/apps/hive-3.1.2/lib   # auxiliary jars, e.g. the MySQL driver
- Create hive-site.xml
vim /root/apps/hive-3.1.2/conf/hive-site.xml
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
  <!-- Note: a literal & is invalid in XML and must be escaped as &amp; -->
  <property>
    <name>javax.jdo.option.ConnectionURL</name>
    <value>jdbc:mysql://mini3:3306/hive?createDatabaseIfNotExist=true&amp;useUnicode=true&amp;characterEncoding=UTF-8&amp;useSSL=false</value>
  </property>
  <property>
    <name>javax.jdo.option.ConnectionDriverName</name>
    <value>com.mysql.jdbc.Driver</value>
  </property>
  <property>
    <name>javax.jdo.option.ConnectionUserName</name>
    <value>root</value>
  </property>
  <property>
    <name>javax.jdo.option.ConnectionPassword</name>
    <value>1234</value>
  </property>
  <property>
    <name>datanucleus.readOnlyDatastore</name>
    <value>false</value>
  </property>
  <property>
    <name>datanucleus.fixedDatastore</name>
    <value>false</value>
  </property>
  <property>
    <name>datanucleus.autoCreateSchema</name>
    <value>true</value>
  </property>
  <property>
    <name>datanucleus.schema.autoCreateAll</name>
    <value>true</value>
  </property>
  <property>
    <name>datanucleus.autoCreateTables</name>
    <value>true</value>
  </property>
  <property>
    <name>datanucleus.autoCreateColumns</name>
    <value>true</value>
  </property>
  <property>
    <name>hive.metastore.local</name>
    <value>true</value>
  </property>
  <!-- Show column headers in query results -->
  <property>
    <name>hive.cli.print.header</name>
    <value>true</value>
  </property>
  <!-- Show the current database name in the CLI prompt -->
  <property>
    <name>hive.cli.print.current.db</name>
    <value>true</value>
  </property>
</configuration>
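Before initializing the schema, make sure the MySQL server on mini3 accepts this connection. A minimal sketch, assuming MySQL 5.x and the root/1234 credentials configured above (createDatabaseIfNotExist=true creates the hive database automatically, but the account still needs remote privileges):
mysql -u root -p
GRANT ALL PRIVILEGES ON hive.* TO 'root'@'%' IDENTIFIED BY '1234';
FLUSH PRIVILEGES;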
- Initialize the metastore database
schematool -dbType mysql -initSchema
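If initialization succeeds, schematool can confirm the schema version it wrote:
schematool -dbType mysql -info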
- Connect with a client
Option 1: the Hive CLI
hive
Option 2: start HiveServer2 in the background and connect with Beeline
nohup hiveserver2 1>/dev/null 2>&1 &
beeline -u jdbc:hive2://localhost:10000 -n root
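HiveServer2 can take a while to start; if Beeline is refused right after launching it, first confirm that port 10000 is listening:
netstat -nltp | grep 10000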
- SSL warning when starting Hive
WARN: Establishing SSL connection without server's identity verification is not recommended. According to MySQL 5.5.45+, 5.6.26+ and 5.7.6+ requirements SSL connection must be established by default if explicit option isn't set. For compliance with existing applications not using SSL the verifyServerCertificate property is set to 'false'. You need either to explicitly disable SSL by setting useSSL=false, or set useSSL=true and provide truststore for server certificate verification.
Fix: add useSSL=false to the JDBC connection URL, for example:
jdbc:mysql://mini3:3306/hive?characterEncoding=utf8&createDatabaseIfNotExist=true&useSSL=false
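In hive-site.xml the same URL must be XML-escaped, since a literal & is invalid there:
<value>jdbc:mysql://mini3:3306/hive?characterEncoding=utf8&amp;createDatabaseIfNotExist=true&amp;useSSL=false</value>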
- Error when running show databases in Hive
FAILED: HiveException java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient
This usually means the metastore schema was never initialized; run schematool and retry:
schematool -dbType mysql -initSchema
- Error initializing the metastore database
org.apache.hadoop.hive.metastore.HiveMetaException: Failed to get schema version.
Underlying cause: com.mysql.jdbc.exceptions.jdbc4.CommunicationsException: Communications link failure
The last packet sent successfully to the server was 0 milliseconds ago. The driver has not received any packets from the server.
SQL Error code: 0
Use --verbose for detailed stacktrace.
*** schemaTool failed ***
Check that the JDBC URL in hive-site.xml points at the correct host, port, and database, and that MySQL is running and reachable.
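A quick reachability check from the Hive host, using the settings above:
mysql -h mini3 -P 3306 -u root -p   # should connect and prompt for the password; if not, fix MySQL networking/privileges first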
- Beeline connection refused
Error: Could not open client transport with JDBC Uri: jdbc:hive2://localhost:10000: Failed to open new session: java.lang.RuntimeException: org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.security.authorize.AuthorizationException): User: root is not allowed to impersonate root (state=08S01,code=0)
HiveServer2 accesses HDFS through the WebHDFS (httpfs) REST interface, with root impersonating the connecting user, so root must be allowed to act as a Hadoop proxy user.
First enable the REST interface by adding the following to hdfs-site.xml:
<property>
  <name>dfs.webhdfs.enabled</name>
  <value>true</value>
</property>
Then add the following to core-site.xml:
<property>
  <name>hadoop.proxyuser.root.hosts</name>
  <value>*</value>
</property>
<property>
  <name>hadoop.proxyuser.root.groups</name>
  <value>*</value>
</property>
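After editing these files, restart HDFS (or refresh the proxy-user settings on a running cluster) and then restart hiveserver2:
hdfs dfsadmin -refreshSuperUserGroupsConfiguration
yarn rmadmin -refreshSuperUserGroupsConfiguration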