安裝Hive(獨立模式 使用MySQL連接) 1.默認安裝了java+hadoop 2.下載對應hadoop版本的安裝包 3.解壓安裝包 tar zxvf apache-hive-1.2.1-bin.tar.gz 4.安裝mysql yum -y install mysql-server mysql mysql-devel //需要以root身份運行 另外可能需要配置yum源 mysql常用命令: service mysqld start/stop chkconfig mysqld on //加入開機啟動 以系統root用戶操作 5.授權mysql(以系統hadoop身份 數據庫root身份進行操作) mysqladmin -u root password "root" //修改root密碼為root mysql -uroot -p密碼 (初始密碼為空) create user 'hive' identified by 'hive'; //創建用于連接的hive用戶 密碼為hive grant all privileges on *.* to 'hive'@'%' identified by "hive" with grant option; flush privileges; //刷新權限 grant all privileges on *.* to 'hive'@'localhost' identified by "hive" with grant option; flush privileges; //刷新權限 grant all privileges on *.* to 'hive'@'hadoop.master' identified by "hive" with grant option; flush privileges; //刷新權限 set global binlog_format='MIXED'; //設置格式 必須執行。不然報錯 exit; service mysqld restart //重啟服務 6.測試連接 mysql -hhadoop.master -uhive -phive //能進去則表示設置成功 create database hive; //創建連接數據庫hive alter database hive character set latin1; 7.配置環境變量(/etc/profile) #hive export HIVE_HOME=/opt/hive-1.2.1 export HIVE_AUX_JARS_PATH=/opt/hive-1.2.1/lib export HIVE_CONF_DIR=/opt/hive-1.2.1/conf export PATH=$PATH:$HIVE_HOME/bin export CLASSPATH=$CLASSPATH:$HIVE_HOME/lib 保存退出 source /etc/profile 8.修改配置文件 1.根據模版復制配置文件 cp hive-default.xml.template hive-site.xml cp hive-env.sh.template hive-env.sh cp hive-log4j.properties.template hive-log4j.properties 2.修改配置文件 ####hive-site.xml#### //添加項目 --0.11 版本以后可不用添加此項 <property> <name>hive.metastore.local</name> <value>false</value> </property> //修改項目 <property> <name>javax.jdo.option.ConnectionURL</name> <value>jdbc:mysql://hadoop.master:3306/hive</value> <description>JDBC connect string for a JDBC metastore</description> </property> <property> <name>javax.jdo.option.ConnectionDriverName</name> <value>com.mysql.jdbc.Driver</value> <description>Driver class 
name for a JDBC metastore</description> </property> <property> <name>javax.jdo.option.ConnectionUserName</name> <value>hive</value> <description>Username to use against metastore database</description> </property> <property> <name>javax.jdo.option.ConnectionPassword</name> <value>hive</value> <description>password to use against metastore database</description> </property> <property> <name>hive.exec.local.scratchdir</name> <value>/opt/hive-1.2.1/tmp</value> //需要創建此目錄 <description>Local scratch space for Hive jobs</description> </property> <property> <name>hive.downloaded.resources.dir</name> <value>/opt/hive-1.2.1/tmp</value> <description>Temporary local directory for added resources in the remote file system.</description> </property> <property> <name>hive.hwi.war.file</name> <value>/opt/hive-1.2.1/lib/hive-hwi-1.2.1.jar</value> <description>This sets the path to the HWI war file, relative to ${HIVE_HOME}. </description> </property> ####hive-env.sh#### HADOOP_HOME=/opt/hadoop-2.5.2 ####hive-log4j.properties#### hive.log.threshold=ALL hive.root.logger=INFO,DRFA hive.log.dir=/opt/hive-1.2.1/logs //需創建相應目錄 hive.log.file=hive.log 9.其他配置項 1.hdfs上創建相應文件夾并修改權限 hadoop fs -mkdir -p /tmp/hive hadoop fs -chmod 777 /tmp/hive hadoop fs -mkdir -p /user/hive hadoop fs -chmod 777 /user/hive 2.修改hadoop hadoop-env.sh配置文件 export HADOOP_CLASSPATH=$HADOOP_CLASSPATH:$CLASSPATH 3.將mysql jdbc jar包拷貝到lib目錄下 cp mysql-connector-java-5.1.21.jar /opt/hive-1.2.1/lib 4.將lib目錄下的jline-2.12.jar 拷貝到/opt/hadoop-2.5.2/share/hadoop/yarn/lib下,并將相應包重命名 cp /opt/hive-1.2.1/lib/jline-2.12.jar /opt/hadoop-2.5.2/share/hadoop/yarn/lib mv /opt/hadoop-2.5.2/share/hadoop/yarn/lib/jline-0.9.94.jar /opt/hadoop-2.5.2/share/hadoop/yarn/lib/jline-0.9.94.jar.bak 9.使用驗證 hive --service metastore & //開機首次執行 hive -e "show databases;" //運行完不報錯即安裝成功 也可以使用hive命令行進去執行 10.常用命令 1.顯示 show tables; show databases; 2.定義 //添加外部分區表 推薦以后就是用這種表 create external 
table access_info(ip string,access_date string,url string) partitioned by(logdate string) row format delimited fields terminated by '\t'; desc access_info; 3.添加數據 alter table access_info add partition(logdate='2016-01-15') location '/access'; --加載文件hdfs實際路徑 access為文件夾名稱 load data local inpath '/home/hadoop/huangzhijian/access.txt' into table access_info_local_file; --加載本地文件 4.查詢 select * from access_info; 5.刪除 drop table access_info; //外部表不會刪除原本數據 內部表就會刪除原數據 //注意不能update 6.其他 1.hive -f test.sql ####test.sql#### select * from t1; select count(*) from t1; 2.hive -e 'hql語句' 3.hive -S -e 'select * from t1'(用法與第一種方式的靜音模式一樣,不會顯示mapreduce的操作過程) 4.hive -e 'select * from t1' > test.txt (將結果輸出到本地文件中)
|
新聞熱點
疑難解答
圖片精選