hive环境搭建

2022-10-26 10:54:23 浏览数 (2)

1.下载Hive

代码语言:shell复制
# Download the Apache Hive 3.1.3 binary distribution from the Apache CDN.
wget https://dlcdn.apache.org/hive/hive-3.1.3/apache-hive-3.1.3-bin.tar.gz

# Extract the tarball (creates the apache-hive-3.1.3-bin/ directory).
tar -zxvf apache-hive-3.1.3-bin.tar.gz

2.配置环境变量

# Append the Hive environment variables below to the system-wide profile.
vim /etc/profile

代码语言:txt复制
# Hive installation root; adjust if Hive was extracted elsewhere.
export HIVE_HOME=/home/hadoop/bigdata/hive
# Add the Hive CLI tools (hive, beeline, schematool) to PATH.
# (Fixed: the original line had a duplicated "export" keyword.)
export PATH=$PATH:$HIVE_HOME/bin

3.修改配置信息

1.修改 hive-env.sh

# Create hive-env.sh from the template shipped in $HIVE_HOME/conf.
cp hive-env.sh.template hive-env.sh

# Add the HADOOP_HOME / HIVE_CONF_DIR settings shown below.
vim hive-env.sh

代码语言:shell复制
# Hadoop installation Hive runs against (sourced by Hive's launch scripts).
HADOOP_HOME=/home/hadoop/bigdata/hadoop

# Hive Configuration Directory can be controlled by:
# export HIVE_CONF_DIR=
export HIVE_CONF_DIR=/home/hadoop/bigdata/hive/conf
2.修改 hive-site.xml
代码语言:html复制
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
    <!-- Metastore backing database: MySQL on node01. The "hive" database
         is created automatically on first connect if it does not exist. -->
    <property>
        <name>javax.jdo.option.ConnectionURL</name>
        <value>jdbc:mysql://node01:3306/hive?createDatabaseIfNotExist=true</value>
        <description>JDBC connect string for a JDBC metastore</description>
    </property>

    <!-- Requires the MySQL Connector/J 8.x jar in $HIVE_HOME/lib. -->
    <property>
        <name>javax.jdo.option.ConnectionDriverName</name>
        <value>com.mysql.cj.jdbc.Driver</value>
        <description>Driver class name for a JDBC metastore</description>
    </property>

    <!-- Credentials must match the MySQL user created for the metastore. -->
    <property>
        <name>javax.jdo.option.ConnectionUserName</name>
        <value>hive</value>
        <description>username to use against metastore database</description>
    </property>

    <property>
        <name>javax.jdo.option.ConnectionPassword</name>
        <value>hive</value>
        <description>password to use against metastore database</description>
    </property>


    <!-- CLI convenience settings: show column headers and current DB. -->
    <property>
        <name>hive.cli.print.header</name>
        <value>true</value>
        <description>Whether to print the names of the columns in query output.</description>
    </property>

    <property>
        <name>hive.cli.print.current.db</name>
        <value>true</value>
        <description>Whether to include the current database in the Hive prompt.</description>
    </property>
    <!-- HiveServer2 Thrift endpoint; beeline connects to node01:10000. -->
    <property>
        <name>hive.server2.thrift.bind.host</name>
        <value>node01</value>
    </property>
    <property>
        <name>hive.server2.thrift.port</name>
        <value>10000</value>
    </property>
    <!-- Allow INSERTs where every partition column is dynamic. -->
    <property>
        <name>hive.exec.dynamic.partition.mode</name>
        <value>nonstrict</value>
    </property>

</configuration>

4.拷贝jar包

# Download the MySQL JDBC driver directly into $HIVE_HOME/lib — Hive only
# loads driver jars from its lib directory, not from $HIVE_HOME itself.
cd $HIVE_HOME/lib

wget https://repo1.maven.org/maven2/mysql/mysql-connector-java/8.0.30/mysql-connector-java-8.0.30.jar

复制 hadoop 的新版 guava 包到 hive 的 lib 目录（需先删除 hive lib 下自带的旧版 guava，避免版本冲突导致启动报错）

# Hive 3.1.3 bundles guava-19.0.jar, which conflicts with Hadoop 3.x's newer
# guava and crashes Hive at startup (NoSuchMethodError); remove it first,
# then copy Hadoop's guava in. (Also fixed the stray "//" in the path.)
rm -f $HIVE_HOME/lib/guava-19.0.jar
cp $HADOOP_HOME/share/hadoop/common/lib/guava-27.0-jre.jar $HIVE_HOME/lib

5.初始化元数据库

代码语言:shell复制
# Initialize the metastore schema in MySQL. NOTE(review): run this only
# AFTER the MySQL 'hive' user below has been created and the JDBC driver
# is in $HIVE_HOME/lib, otherwise it fails to connect.
schematool -initSchema -dbType mysql 

mysql创建用户（注意：需在执行上面的 schematool 初始化之前先完成）

代码语言:sql复制
-- Create the metastore account and grant it full access to the hive schema.
CREATE USER 'hive'@'%' IDENTIFIED BY 'hive';

GRANT ALL PRIVILEGES ON hive.* TO 'hive'@'%';
FLUSH PRIVILEGES;

6.启动hiveserver2

# ${HIVE_HOME}/logs is not created by the Hive tarball; create it first or
# the log redirect fails and hiveserver2 never starts on a fresh install.
mkdir -p ${HIVE_HOME}/logs
cd ${HIVE_HOME}/bin;nohup ${HIVE_HOME}/bin/hiveserver2 > ${HIVE_HOME}/logs/hiveserver2.log 2>&1 &

7.登录hive

代码语言:shell复制
# Plain Hive CLI (talks to the metastore directly).
hive
 
# Connect through HiveServer2 with Beeline (the server needs a minute or
# two after startup before it accepts connections).
beeline -u jdbc:hive2://node01:10000/default -n hadoop

0 人点赞