Hive Remote Mode

Installation

# Extract the Hive distribution on the host
tar -zxvf apache-hive-3.1.3-bin.tar.gz

# Copy it into the hadoop_master container
docker cp /home/apache-hive-3.1.3-bin/ hadoop_master:/usr/local/

# Inside the container, replace Hive's bundled guava-19.0 with Hadoop's guava-27.0 to avoid a version conflict
rm -rf /usr/local/apache-hive-3.1.3-bin/lib/guava-19.0.jar

cp /usr/local/hadoop-3.3.6/share/hadoop/common/lib/guava-27.0-jre.jar /usr/local/apache-hive-3.1.3-bin/lib/
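Optionally, verify the swap from inside the container (an extra check, not part of the original steps):

# Only guava-27.0-jre.jar should remain under Hive's lib; guava-19.0.jar must be gone
ls /usr/local/apache-hive-3.1.3-bin/lib/ | grep guava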

Modify the configuration files

cd /usr/local/apache-hive-3.1.3-bin/conf
cp hive-env.sh.template hive-env.sh

vi hive-env.sh

export HADOOP_HOME=/usr/local/hadoop-3.3.6
export HIVE_CONF_DIR=/usr/local/apache-hive-3.1.3-bin/conf
export HIVE_AUX_JARS_PATH=/usr/local/apache-hive-3.1.3-bin/lib

vi /usr/local/apache-hive-3.1.3-bin/conf/hive-site.xml

## Note: & characters in hive-site.xml (e.g. in the JDBC URL below) must be escaped as &amp;

<configuration>
    <!-- MySQL settings for storing the metastore metadata -->
    <property>
        <name>javax.jdo.option.ConnectionURL</name>
        <value>jdbc:mysql://master:3306/hive3?createDatabaseIfNotExist=true&amp;useSSL=false&amp;useUnicode=true&amp;characterEncoding=UTF-8</value>
    </property>
    <property>
        <name>javax.jdo.option.ConnectionDriverName</name>
        <value>com.mysql.jdbc.Driver</value>
    </property>
    <property>
        <name>javax.jdo.option.ConnectionUserName</name>
        <value>root</value>
    </property>
    <property>
        <name>javax.jdo.option.ConnectionPassword</name>
        <value>root</value>
    </property>
    <!-- Host that HiveServer2 binds to -->
    <property>
        <name>hive.server2.thrift.bind.host</name>
        <value>master</value>
    </property>
    <!-- Remote-mode metastore deployment: metastore address -->
    <property>
        <name>hive.metastore.uris</name>
        <value>thrift://master:9083</value>
    </property>
    <!-- Disable metastore event DB notification API authorization -->
    <property>
        <name>hive.metastore.event.db.notification.api.auth</name>
        <value>false</value>
    </property>
</configuration>
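Before initializing the schema, it can be worth confirming that MySQL on master accepts the account configured above (a minimal check; assumes the mysql client is available and the root/root credentials from hive-site.xml):

mysql -h master -P 3306 -uroot -proot -e "SELECT VERSION();"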

MySQL JDBC driver

## Driver download
https://downloads.mysql.com/archives/c-j/

## Option 1: download the RPM package and install it
rpm -ivh <package name>
## Locate the installed driver jar
rpm -ql <package name>

## Option 2: from the zip package, copy the connector jar into Hive's lib directory in the container
docker cp /home/mysql-connector-java-5.1.49/mysql-connector-java-5.1.49.jar hadoop_master:/usr/local/apache-hive-3.1.3-bin/lib/
cd /usr/local/apache-hive-3.1.3-bin/bin
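A quick optional check that the connector jar actually landed in Hive's lib directory:

ls /usr/local/apache-hive-3.1.3-bin/lib/ | grep mysql-connector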

Initialize the metastore schema

cd /usr/local/apache-hive-3.1.3-bin/bin 
./schematool -initSchema -dbType mysql -verbose
# On success, 74 tables are created in the hive3 database in MySQL
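To double-check the result, count the tables in the hive3 database (a sketch; assumes the root/root MySQL account from hive-site.xml):

mysql -h master -uroot -proot -e "USE hive3; SHOW TABLES;" | wc -l
# Expect about 75 lines: 74 tables plus the header row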

Create Hive storage directories on HDFS

hadoop fs -mkdir /tmp
hadoop fs -mkdir -p /user/hive/warehouse
hadoop fs -chmod g+w /tmp
hadoop fs -chmod g+w /user/hive/warehouse
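Optionally confirm the directories exist with group write permission:

# Both entries should show group write, e.g. drwxrwxr-x
hadoop fs -ls -d /tmp /user/hive/warehouse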

1. Start the metastore service
# Foreground start; stop with Ctrl+C

/usr/local/apache-hive-3.1.3-bin/bin/hive --service metastore

# Foreground start with DEBUG logging to the console

/usr/local/apache-hive-3.1.3-bin/bin/hive --service metastore --hiveconf hive.root.logger=DEBUG,console

# Background start with nohup; to stop it, find the PID with jps and use kill -9

nohup /usr/local/apache-hive-3.1.3-bin/bin/hive --service metastore &
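To confirm the metastore is up, check for its process and a listener on the Thrift port 9083 configured in hive.metastore.uris (a sketch; assumes ss is available, otherwise use netstat):

jps
ss -nltp | grep 9083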

2. Start the HiveServer2 service

nohup /usr/local/apache-hive-3.1.3-bin/bin/hive --service hiveserver2 &

# Note: HiveServer2 takes some time to start; connecting with Beeline immediately after launching it may fail
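Instead of guessing, one way is to poll port 10000 (the default hive.server2.thrift.port) until it is listening; a sketch that assumes ss is available:

# Wait until HiveServer2 is accepting connections before using Beeline
until ss -nlt | grep -q ':10000'; do sleep 5; done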

3. Connect with the Beeline client
## Copy the Hive installation from master to the Beeline client machine (slave2)
scp -r /usr/local/apache-hive-3.1.3-bin/ root@slave2:/usr/local/
## Connect
ssh slave2
/usr/local/apache-hive-3.1.3-bin/bin/beeline
beeline> !connect jdbc:hive2://master:10000
Enter username for jdbc:hive2://master:10000: root
Enter password for jdbc:hive2://master:10000: (just press Enter; no password)
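Once connected, a short smoke test exercises the whole chain (Beeline → HiveServer2 → metastore → MySQL → HDFS); the table name here is just an illustrative example:

0: jdbc:hive2://master:10000> show databases;
0: jdbc:hive2://master:10000> create table smoke_test (id int);
0: jdbc:hive2://master:10000> show tables;
0: jdbc:hive2://master:10000> drop table smoke_test;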