Install Hive 2.1.1 on Fedora


1) /etc/profile

PATH=$PATH:$HOME/.local/bin:$HOME/bin


export JAVA_HOME=/home/[email protected]/software/jdk1.8.0_121
PATH=$PATH:$JAVA_HOME/bin
export SCALA_HOME=/usr/share/scala
export CLASSPATH=.:$JAVA_HOME/lib:$JAVA_HOME/lib/tools.jar:$JAVA_HOME/lib/dt.jar
export PATH

export HADOOP_HOME=/home/[email protected]/software/hadoop-2.7.0
#export HIVE_HOME=/home/[email protected]/software/hive-0.10.0
export HIVE_HOME=/home/[email protected]/software/apache-hive-2.1.1
export HADOOP_CONF_DIR=${HADOOP_HOME}/etc/hadoop

export PATH=$PATH:$HBASE_HOME/bin:$HADOOP_HOME/bin:$HADOOP_HOME/sbin:$HIVE_HOME/bin

alias tohd='cd /home/[email protected]/software/hadoop-2.7.0'
#alias tohv='cd /home/[email protected]/software/hive-0.10.0'
alias tohv='cd /home/[email protected]/software/apache-hive-2.1.1'
alias tohb='cd /home/[email protected]/software/hbase-1.2.4'
alias tosp='cd /home/[email protected]/software/spark-2.1.0-bin-hadoop2.7'
alias tohdlog='cd /home/[email protected]/software/hadoop-2.7.0/logs'



2) Copy the template config files (run inside $HIVE_HOME/conf)

# Materialize real config files from the shipped *.template copies.
# hive-default.xml.template is copied twice: hive-default.xml holds the
# global defaults, hive-site.xml holds user overrides (site wins at startup).
cp hive-default.xml.template hive-default.xml

cp hive-default.xml.template hive-site.xml

# FIX: Hive 2.x uses Log4j2 and loads hive-exec-log4j2.properties /
# hive-log4j2.properties — the destination names must keep the "2".
# The original copied to hive-*-log4j.properties, which Hive 2.1.1 ignores.
cp hive-exec-log4j2.properties.template hive-exec-log4j2.properties

cp hive-log4j2.properties.template hive-log4j2.properties



即:把幾個帶.template後綴的模板文件,複製一份變成不帶.template的配置文件,注意hive-default.xml.template這個要複製二份,一個是hive-default.xml,另一個是hive-site.xml,其中hive-site.xml爲用戶自定義配置,hive-default.xml爲全局配置,hive啓動時,-site.xml自定義配置會覆蓋-default.xml全局配置的相同配置項。


下面爲hive-site.xml: (同時,要修改 hive-site.xml 中所有包含 ${system:java.io.tmpdir} => /home/[email protected]/hive/tmp)


<configuration>
    <!-- Skip strict metastore schema-version verification (dev setup;
         lets Hive start even if the recorded schema version mismatches). -->
    <property>
        <name>hive.metastore.schema.verification</name>
        <value>false</value>
    </property>

    <!-- Embedded (local) metastore client. NOTE(review): this key is
         deprecated in Hive 2.x and inferred automatically; kept for
         backward compatibility with the original tutorial. -->
    <property>
        <name>hive.metastore.local</name>
        <value>true</value>
    </property>

    <!-- FIX: JDBC scheme lowercased ("jdbc:mysql", not "jdbc:MySQL") per
         Connector/J URL syntax. Metastore DB "hive" must exist in MySQL. -->
    <property>
        <name>javax.jdo.option.ConnectionURL</name>
        <value>jdbc:mysql://127.0.0.1:3306/hive?characterEncoding=UTF-8</value>
    </property>

    <property>
        <name>javax.jdo.option.ConnectionDriverName</name>
        <value>com.mysql.jdbc.Driver</value>
    </property>

    <property>
        <name>javax.jdo.option.ConnectionUserName</name>
        <value>root</value>
    </property>

    <property>
        <name>javax.jdo.option.ConnectionPassword</name>
        <value>Hrs12345</value>
    </property>

    <!-- FIX: the original used C-style "//" comments after the values; that
         is not legal XML annotation and pollutes the parsed property text.
         All comments converted to proper XML comments. -->

    <!-- Scratch dir on HDFS. -->
    <property>
        <name>hive.exec.scratchdir</name>
        <value>/tmp/hive</value>
    </property>

    <!-- Scratch dir on the local Linux filesystem. -->
    <property>
        <name>hive.exec.local.scratchdir</name>
        <value>/home/[email protected]/hive/tmp</value>
    </property>

    <!-- Downloaded-resources dir on the local Linux filesystem. -->
    <property>
        <name>hive.downloaded.resources.dir</name>
        <value>/home/[email protected]/hive/tmp/${hive.session.id}_resources</value>
    </property>

    <!-- Warehouse root on HDFS. -->
    <property>
        <name>hive.metastore.warehouse.dir</name>
        <value>/user/hive/warehouse</value>
    </property>
</configuration>

3) hive-env.sh

export HADOOP_HOME=/home/[email protected]/software/hadoop-2.7.0
export HIVE_HOME=/home/[email protected]/software/apache-hive-2.1.1
export HIVE_CONF_DIR=$HIVE_HOME/conf
export HADOOP_CONF_DIR=${HADOOP_HOME}/etc/hadoop
export HIVE_AUX_JARS_PATH=$HIVE_HOME/lib
export PATH=$PATH:$HBASE_HOME/bin:$HADOOP_HOME/bin:$HADOOP_HOME/sbin:$HIVE_HOME/bin

# NOTE(review): these are one-time shell commands, not environment settings —
# they do not belong inside hive-env.sh despite appearing under its heading.
# They are repeated verbatim in step 4; run them there instead.
$HADOOP_HOME/bin/hadoop fs -mkdir -p /user/hive/warehouse  
$HADOOP_HOME/bin/hadoop fs -mkdir -p /tmp/hive/  
hadoop fs -chmod 777 /user/hive/warehouse  
hadoop fs -chmod 777 /tmp/hive

4) Start up

# Create Hive's warehouse and scratch directories in HDFS and open their
# permissions. 777 is acceptable on a single-user dev box; too permissive
# for any shared or production cluster.
$HADOOP_HOME/bin/hadoop fs -mkdir -p /user/hive/warehouse  
$HADOOP_HOME/bin/hadoop fs -mkdir -p /tmp/hive/  
hadoop fs -chmod 777 /user/hive/warehouse  
hadoop fs -chmod 777 /tmp/hive



# Initialize the metastore schema in MySQL (run once, before first start).
# FIX: removed the stray leading ">" prompt character — pasted literally,
# ">./bin/schematool ..." is an output redirection that would truncate the
# schematool script. Also use $HIVE_HOME so the command is cwd-independent.
$HIVE_HOME/bin/schematool -initSchema -dbType mysql




發表評論
所有評論
還沒有人評論,想成為第一個評論的人麼? 請在上方評論欄輸入並且點擊發布.
相關文章