centos7 hive安装


安装hive3.1.1

  * 安装环境

  操作系统:centos7

  Hadoop:3.2.2

  Mysql:5.7

  * 下载安装hive

  点击下载

  * 解压

 tar -zxvf apache-hive-3.1.1-bin.tar.gz 
cd apache-hive-3.1.1-bin
ll
total 56
drwxr-xr-x 3 root root   157 Aug  5 13:31 bin
drwxr-xr-x 2 root root  4096 Aug  5 13:31 binary-package-licenses
drwxr-xr-x 2 root root  4096 Aug  5 13:31 conf
drwxr-xr-x 4 root root    34 Aug  5 13:31 examples
drwxr-xr-x 7 root root    68 Aug  5 13:31 hcatalog
drwxr-xr-x 2 root root    44 Aug  5 13:31 jdbc
drwxr-xr-x 4 root root 12288 Aug  5 13:31 lib
-rw-r--r-- 1 root root 20798 Oct 23  2018 LICENSE
-rw-r--r-- 1 root root   230 Oct 23  2018 NOTICE
-rw-r--r-- 1 root root   222 Oct 23  2018 RELEASE_NOTES.txt
drwxr-xr-x 4 root root    35 Aug  5 13:31 scripts

  * 添加环境变量

vi /etc/profile

#添加以下内容
export HIVE_HOME=/usr/local/hive/apache-hive-3.1.1-bin
export PATH=$PATH:$HIVE_HOME/bin
#更新资源
source /etc/profile

  * 在 /usr/local/hive/apache-hive-3.1.1-bin/conf/ 下创建hive-site.xml 文件

ls
beeline-log4j2.properties.template  hive-env.sh.template                  ivysettings.xml                         metastore_db
derby.log                           hive-exec-log4j2.properties.template  llap-cli-log4j2.properties.template     parquet-logging.properties
hive-default.xml.template           hive-log4j2.properties.template       llap-daemon-log4j2.properties.template

# 复制hive-default.xml.template并更名为hive-site.xml
cp hive-default.xml.template hive-site.xml

ls
beeline-log4j2.properties.template  hive-env.sh.template                  hive-site.xml                        llap-daemon-log4j2.properties.template
derby.log                           hive-exec-log4j2.properties.template  ivysettings.xml                      metastore_db
hive-default.xml.template           hive-log4j2.properties.template       llap-cli-log4j2.properties.template  parquet-logging.properties

  * 修改hive-site.xml

  
    hive.metastore.warehouse.dir
    /user/hive/warehouse
    location of default database for the warehouse
  

  
    hive.exec.scratchdir
    /tmp/hive
    HDFS root scratch dir for Hive jobs which gets created with write all (733) permission. For each connecting user, an HDFS scratch dir: ${hive.exec.scratchdir}/<username> is created, with ${hive.scratch.dir.permission}.
  

  * 创建两个对应的目录并赋予读写权限

hadoop fs -mkdir -p /user/hive/warehouse
hadoop fs -mkdir -p /tmp/hive
hadoop fs -chmod -R 777 /user/hive/warehouse
hadoop fs -chmod -R 777 /tmp/hive
hadoop fs -ls /
Found 4 items
drwxr-xr-x   - root supergroup          0 2019-08-02 09:01 /input
drwxr-xr-x   - root supergroup          0 2019-08-02 09:02 /output
drwx------   - root supergroup          0 2019-08-05 14:05 /tmp
drwxr-xr-x   - root supergroup          0 2019-08-05 19:05 /user

  * Hive相关配置

将 hive-site.xml 中所有的 ${system:java.io.tmpdir} 改为hive的本地临时目录,将所有的 ${system:user.name} 改为用户名(如 root)。
如果该目录不存在,需要先创建该目录。

mkdir temp
chmod -R 777 temp
pwd
/usr/local/hive/apache-hive-3.1.1-bin/temp


  
    hive.exec.local.scratchdir
    /usr/local/hive/apache-hive-3.1.1-bin/temp/root
  
  
  
    hive.downloaded.resources.dir
    /usr/local/hive/apache-hive-3.1.1-bin/temp/${hive.session.id}_resources
  

  
    hive.server2.logging.operation.log.location
    /usr/local/hive/apache-hive-3.1.1-bin/temp/root/operation_logs
  

  
    hive.querylog.location
    /usr/local/hive/apache-hive-3.1.1-bin/temp/root
  

  * 数据库相关配置

同样修改 hive-site.xml 中的以下几项

# 数据库jdbc地址,value标签内修改为主机ip地址
# 注意:在 xml 文件中 & 必须转义写成 &amp; ,否则 hive-site.xml 解析会报错
  
    javax.jdo.option.ConnectionURL
    jdbc:mysql://DW1:3306/hive?createDatabaseIfNotExist=true&amp;characterEncoding=UTF-8
  
  
# 数据库的驱动类名称
# 新版本8.0版本的驱动为com.mysql.cj.jdbc.Driver
# 旧版本5.x版本的驱动为com.mysql.jdbc.Driver
  
    javax.jdo.option.ConnectionDriverName
    com.mysql.cj.jdbc.Driver
  
  
# 数据库用户名
  
    javax.jdo.option.ConnectionUserName
    root
  
 
# 数据库密码
   
    javax.jdo.option.ConnectionPassword
    123456 #修改为你自己的mysql密码
  

  
    hive.metastore.schema.verification
    false
  

  * 配置hive-log4j2.properties

复制并更名hive-log4j2.properties.template为 hive-log4j2.properties文件

cp hive-log4j2.properties.template hive-log4j2.properties
vi hive-log4j2.properties
# 修改内容
property.hive.log.dir = /usr/local/hive/apache-hive-3.1.1-bin/temp/root

  * 配置hive-env.sh文件

cp hive-env.sh.template hive-env.sh
vi hive-env.sh


# Set HADOOP_HOME to point to a specific hadoop install directory
# HADOOP_HOME=${bin}/../../hadoop
export HADOOP_HOME=/usr/local/hadoop/hadoop-3.2.2 #hadoop 安装目录,与本机实际安装版本保持一致

# Hive Configuration Directory can be controlled by:
# export HIVE_CONF_DIR=
export HIVE_CONF_DIR=/usr/local/hive/apache-hive-3.1.1-bin/conf #hive 配置文件目录

# Folder containing extra libraries required for hive compilation/execution can be controlled by:
# export HIVE_AUX_JARS_PATH=
export HIVE_AUX_JARS_PATH=/usr/local/hive/apache-hive-3.1.1-bin/lib #hive 依赖jar包目录

  * 启动hive

mysql -u root -p
mysql> create database hive;
Query OK, 1 row affected (0.05 sec)

  * 注意需要下载数据库驱动

下载与 MySQL 版本对应的 mysql-connector-java 的 jar 包,将其放入 $HIVE_HOME/lib 目录下。
然后初始化元数据库,再启动 hive:

schematool -dbType mysql -initSchema
hive