〇、參考地址
1、Linux下編寫指令碼自動安裝hive
2、如何執行.sh指令碼檔案
https://blog.csdn.net/weixin_55821558/article/details/125830542
3、hive教程:啟動hiveserver2,透過jdbc方式訪問hive☆
https://blog.csdn.net/a12355556/article/details/124565395
4、CDH安裝hadoop與版本比較
https://www.freesion.com/article/8763708397/
一、程式碼編寫
1、下載Hive
原生:http://archive.apache.org/dist/hive/hive-1.1.0/
CDH版本(已失效):https://archive.cloudera.com/p/cdh5/cdh/5 注意:登入名為郵箱,密碼大小寫數字+符號!
命令下載(已失效):wget https://archive.cloudera.com/cdh5/cdh/5/hive-1.1.0-cdh5.14.2.tar.gz
CDH5網盤備份:連結:https://pan.baidu.com/s/1XUGRMpjTbrJWDy9QCT9vTw?pwd=gmyf
比較:CDH版本比原生的相容性更強,下載哪個都可以
2、編寫指令碼
vi hive_insatll.sh
echo "----------安裝hive----------"
# -C: extract into the target directory
tar -zxf /usr/local/hive-1.1.0-cdh5.14.2.tar.gz -C /usr/local/
# rename the extracted directory to a short, stable path
mv /usr/local/hive-1.1.0-cdh5.14.2 /usr/local/hive110
# append Hive environment variables to the system profile
echo '#hive' >>/etc/profile
echo 'export HIVE_HOME=/usr/local/hive110' >>/etc/profile
echo 'export PATH=$PATH:$HIVE_HOME/bin' >>/etc/profile
# create the hive-site.xml configuration file
touch /usr/local/hive110/conf/hive-site.xml
path="/usr/local/hive110/conf/hive-site.xml"
# write the configuration (quote "$path" to be safe against IFS surprises)
echo '<?xml version="1.0" encoding="UTF-8" standalone="no"?>' >> "$path"
echo '<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>' >> "$path"
echo '<configuration>' >> "$path"
# same as a plain JDBC URL: replace the IP address, user name and password with your own
echo '<property><name>javax.jdo.option.ConnectionURL</name><value>jdbc:mysql://192.168.91.137:3306/hive137?createDatabaseIfNotExist=true</value></property>' >> "$path"
echo '<property><name>javax.jdo.option.ConnectionDriverName</name><value>com.mysql.jdbc.Driver</value></property>' >> "$path"
echo '<property><name>javax.jdo.option.ConnectionUserName</name><value>root</value></property>' >> "$path"
echo '<property><name>javax.jdo.option.ConnectionPassword</name><value>123123</value></property>' >> "$path"
# fixed typo: the property prefix is "thrift", not "thift" — with the typo
# HiveServer2 silently ignores these client credentials
echo '<property><name>hive.server2.thrift.client.user</name><value>root</value></property>' >> "$path"
echo '<property><name>hive.server2.thrift.client.password</name><value>123123</value></property>' >> "$path"
echo '</configuration>' >> "$path"
3、呼叫
新增執行許可權:chmod u+x hive_insatll.sh
執行.sh檔案:./hive_insatll.sh 或 sh hive_insatll.sh
4、使環境變數生效
source /etc/profile
二、執行後的其他操作
1、下載mysql的jar包
下載地址:https://mvnrepository.com/artifact/mysql/mysql-connector-java/5.1.38
其他jar包:mysql-binlog-connector-java、 eventuate-local-java-cdc-connector-mysql-binlog……
注意:已經轉至新目錄
2、放入hive110/lib目錄
3、執行格式化操作
schematool -dbType mysql -initSchema
4、啟動hiveserver2
前臺啟動:hive --service hiveserver2
後臺啟動:nohup hive --service hiveserver2 2>&1 &
組合使用:nohup [xxx 命令操作] > file 2>&1 &,表示將 xxx 命令執行的結果(標準輸出與錯誤輸出)都寫入 file 中(檔案描述符:0 表示標準輸入,1 表示標準輸出,2 表示錯誤輸出)
三、配置與驗證
1、beeline 客戶端連線hive
連線:beeline -u jdbc:hive2://localhost:10000 -n root
執行語句:show databases;
2、java驗證
(1)引入依賴
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-jdbc</artifactId>
<version>1.1.0</version>
<exclusions>
<exclusion>
<groupId>org.eclipse.jetty.aggregate</groupId>
<artifactId>jetty-all</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.hive</groupId>
<artifactId>hive-shims</artifactId>
</exclusion>
</exclusions>
</dependency>
(2)程式碼驗證
import java.sql.SQLException;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;
import java.sql.DriverManager;
/**
 * Smoke-tests a HiveServer2 instance over JDBC: creates a table, inspects it,
 * loads a local file, and runs a few queries, printing each result to stdout.
 *
 * <p>Requires a running HiveServer2 at localhost:10000 and the file
 * /opt/tmp/a.txt on the Hive server host.
 */
public class HiveAPITest {
  // Fully-qualified class name of the HiveServer2 JDBC driver.
  private static String driverName = "org.apache.hive.jdbc.HiveDriver";

  public static void main(String[] args) throws SQLException {
    try {
      Class.forName(driverName);
    } catch (ClassNotFoundException e) {
      // Driver jar is missing from the classpath; nothing else can work.
      e.printStackTrace();
      System.exit(1);
    }
    // replace "hive" here with the name of the user the queries should run as.
    // try-with-resources guarantees the connection/statement/result sets are
    // closed even when a query throws (the original leaked all three).
    try (Connection con =
            DriverManager.getConnection("jdbc:hive2://localhost:10000/default", "hive", "");
        Statement stmt = con.createStatement()) {
      String tableName = "testHiveDriverTable";
      stmt.execute("drop table if exists " + tableName);
      stmt.execute("create table " + tableName
          + " (key int, value string) row format delimited fields terminated by '\t'");
      // show tables
      String sql = "show tables '" + tableName + "'";
      System.out.println("Running: " + sql);
      try (ResultSet res = stmt.executeQuery(sql)) {
        if (res.next()) {
          System.out.println(res.getString(1));
        }
      }
      // describe table
      sql = "describe " + tableName;
      System.out.println("Running: " + sql);
      try (ResultSet res = stmt.executeQuery(sql)) {
        while (res.next()) {
          System.out.println(res.getString(1) + "\t" + res.getString(2));
        }
      }
      // load data into table
      // NOTE: filepath has to be local to the hive server
      // NOTE: /opt/tmp/a.txt is a \t separated file with two fields per line
      String filepath = "/opt/tmp/a.txt";
      sql = "load data local inpath '" + filepath + "' into table " + tableName;
      System.out.println("Running: " + sql);
      stmt.execute(sql);
      // select * query
      sql = "select * from " + tableName;
      System.out.println("Running: " + sql);
      try (ResultSet res = stmt.executeQuery(sql)) {
        while (res.next()) {
          System.out.println(String.valueOf(res.getInt(1)) + "\t" + res.getString(2));
        }
      }
      // regular hive query
      sql = "select count(1) from " + tableName;
      System.out.println("Running: " + sql);
      try (ResultSet res = stmt.executeQuery(sql)) {
        while (res.next()) {
          System.out.println(res.getString(1));
        }
      }
    }
  }
}