linux下基于hadoop安装hive和Zeppelin安装

将包放在opt目录下

/opt/soft/hive110/lib目录下

[root@wq opt]# tar -zxf hive-1.1.0-cdh5.14.2.tar.gz
[root@wq opt]# mv hive-1.1.0-cdh5.14.2 soft/hive110
[root@wq opt]# cd /opt/soft/hive110/conf
[root@wq conf]# touch hive-site.xml
[root@wq conf]# vim hive-site.xml

修改hive-site.xml如下(ip地址需要更改为自己的MySQL所在主机地址)

<configuration>
	<property>
		<name>hive.metastore.warehouse.dir</name>
		<value>hive110/warehouse</value>
	</property>
	<property>
		<name>hive.metastore.local</name>
		<value>false</value>
	</property>
	<property>
		<name>javax.jdo.option.ConnectionURL</name>
		<value>jdbc:mysql://192.168.100.155:3306/hive?useSSL=false&amp;createDatabaseIfNotExist=true</value>
	</property>
	<property>
		<name>javax.jdo.option.ConnectionDriverName</name>
		<value>com.mysql.jdbc.Driver</value>
	</property>
	<property>
		<name>javax.jdo.option.ConnectionUserName</name>
		<value>root</value>
	</property>
	<property>
		<name>javax.jdo.option.ConnectionPassword</name>
		<value>okok</value>
	</property>
	<property>
		<name>hive.server2.authentication</name>
		<value>NONE</value>
	</property>
	<property>
		<name>hive.server2.thrift.client.user</name>
		<value>root</value>
	</property>
	<property>
		<name>hive.server2.thrift.client.password</name>
		<value>root</value>
	</property>
</configuration>
[root@wq conf]# vim /etc/profile
[root@wq conf]# source /etc/profile

修改profile文件如下

#hive environment
export HIVE_HOME=/opt/soft/hive110
export PATH=$PATH:$HIVE_HOME/bin

在bin目录下执行以下命令初始化元数据库

schematool -dbType mysql -initSchema

启动hive(在bin目录下执行)

linux下基于hadoop安装hive和Zeppelin安装_第1张图片

 hive --service hiveserver2 & 

第二章Beeline用法

beeline -u jdbc:hive2://192.168.100.155:10000/mydemo

启动脚本

#! /bin/bash
 
my_start(){
	if [ $1 == "start" ];then
		#start hadoop
		sh /opt/soft/hadoop260/sbin/start-dfs.sh
		sh /opt/soft/hadoop260/sbin/start-yarn.sh
		#start hive
		nohup  /opt/soft/hive110/bin/hive --service hiveserver2 &
		#start zeppelin
		sh /opt/soft/zeppelin081/bin/zeppelin-daemon.sh start
		echo "start over"
	else
		#close zeppelin
		sh /opt/soft/zeppelin081/bin/zeppelin-daemon.sh stop
		#close hive
		hiveprocess=`jps | grep RunJar | awk '{print $1}'`
 
		for no in $hiveprocess
		do
			kill -9 $no  #如果出现多个Jar 循环删除
		done
 
		#stop hadoop
		sh /opt/soft/hadoop260/sbin/stop-dfs.sh
		sh /opt/soft/hadoop260/sbin/stop-yarn.sh
 
		echo "stop over"
	fi
}
 
my_start $1
	
chmod +x run.sh
./run.sh start
./run.sh stop

Zeppelin安装

tar -zvxf zeppelin-0.8.1-bin-all.tgz
mv zeppelin-0.8.1-bin-all soft/zeppelin081
 cd soft/zeppelin081/conf

修改 配置文件

cp zeppelin-site.xml.template zeppelin-site.xml
vim zeppelin-site.xml

<property>
  <name>zeppelin.helium.registry</name>
  <value>helium</value>
</property>
cp zeppelin-env.sh.template zeppelin-env.sh

添加JAVA_HOME和HADOOP_CONF_DIR  (指定自己的java和hadoop安装目录)

vim zeppelin-env.sh   
 
 export JAVA_HOME=/opt/soft/jdk180
export HADOOP_CONF_DIR=/opt/soft/hadoop260/etc/hadoop     
 vim /etc/profile
 
source /etc/profile
#zeppelin environment
export ZEPPELIN_HOME=/opt/soft/zeppelin081
 
export PATH=$PATH:$ZEPPELIN_HOME/bin
[root@wq bin]# cp /opt/soft/hive110/conf/hive-site.xml /opt/soft/zeppelin081/conf/

导入jar包

[root@wq bin]# cp /opt/soft/hadoop260/share/hadoop/common/hadoop-common-2.6.0-cdh5.14.2.jar /opt/soft/zeppelin081/interpreter/jdbc
cp /opt/soft/hive110/lib/hive-jdbc-1.1.0-cdh5.14.2-standalone.jar /opt/soft/zeppelin081/interpreter/jdbc/
zeppelin-daemon.sh start

 进入http://192.168.100.155:8080/#/

linux下基于hadoop安装hive和Zeppelin安装_第2张图片

 linux下基于hadoop安装hive和Zeppelin安装_第3张图片

 

default.driver   org.apache.hive.jdbc.HiveDriver

default.url     jdbc:hive2://192.168.42.200:10000

default.user    hive

linux下基于hadoop安装hive和Zeppelin安装_第4张图片

 

你可能感兴趣的:(hadoop,hive,linux)