import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import java.io.IOException;
import java.security.PrivilegedAction;
import java.sql.*;
// Utility class for connecting to Hive over JDBC with Kerberos authentication.
// NOTE(review): state is held in static fields, so this class is a process-wide
// singleton and is not thread-safe — confirm callers never connect concurrently.
public class HiveUtil {
// Hadoop configuration used for the Kerberos login; populated in the static block below.
private static Configuration cfg;
// Lazily-initialized shared JDBC connection/statement (see conn()).
private static Connection conn;
private static Statement stmt;
static {
// The two property values below are untranslated placeholders from the original
// author: "krb5.realm的名稱" = "name of the krb5 realm", "krb5機器名" = "krb5 host
// name". They MUST be replaced with real realm/KDC values before this code works.
System.setProperty("java.security.krb5.realm", "krb5.realm的名稱");
System.setProperty("java.security.krb5.kdc", "krb5機器名");
cfg = new Configuration();
// Request a fresh (uncached) HDFS FileSystem instance per Configuration.
cfg.set("fs.hdfs.impl.disable.cache", "true");
// Enable Kerberos authentication for Hadoop RPC.
cfg.set("hadoop.security.authentication", "kerberos");
cfg.set("hadoop.rpc.protection", "authentication");
cfg.set("hadoop.security.crypto.cipher.suite", "AES/CTR/NoPadding");
}
/**
 * Logs in to Kerberos from a keytab file and returns the resulting UGI.
 *
 * @param principalName Kerberos principal name contained in the keytab
 * @param keytab filesystem path to the keytab file
 * @return the logged-in {@link UserGroupInformation}, or {@code null} if login fails
 */
private static UserGroupInformation login(String principalName, String keytab) {
try {
UserGroupInformation.setConfiguration(cfg);
UserGroupInformation information = UserGroupInformation.loginUserFromKeytabAndReturnUGI(principalName, keytab);
System.out.println(information.hasKerberosCredentials());
return information;
} catch (IOException e) {
System.out.println(String.format("message:%s, parameter:{\"principalName\":\"%s\",\"keytab\":\"%s\"}",
"login kerberos error", principalName, keytab));
// BUG FIX: the original dropped the exception entirely — keep the stack trace
// so Kerberos failures (wrong realm, unreadable keytab, clock skew) are diagnosable.
e.printStackTrace();
return null;
}
}
/**
 * Lazily opens the shared Hive JDBC connection and statement, running the JDBC
 * handshake under the Kerberos identity obtained from {@link #login}.
 * No-op if the connection is already established.
 *
 * @throws IllegalStateException if the Kerberos login fails
 */
public static void conn() {
if (conn != null && stmt != null) {
return; // already connected
}
// NOTE(review): principal/keyTab are not defined in this class as shown —
// presumably static config fields elsewhere; confirm they exist.
UserGroupInformation ugi = login(principal, keyTab);
if (ugi == null) {
// BUG FIX: login() returns null on failure; the original then NPE'd on
// ugi.doAs with no useful message. Fail with an explicit diagnostic instead.
throw new IllegalStateException("Kerberos login failed; cannot open Hive connection");
}
ugi.doAs(new PrivilegedAction<Object>() {
public Object run() {
try {
Class.forName("org.apache.hive.jdbc.HiveDriver");
// url is the Hive JDBC connection string (defined outside this view).
conn = DriverManager.getConnection(url);
stmt = conn.createStatement();
useDatabase();
} catch (Exception e) {
e.printStackTrace();
}
return null;
}
});
}
/**
 * Switches the shared Hive session to the historical default database ("test").
 */
public static void useDatabase() {
useDatabase("test");
}

/**
 * Switches the shared Hive session to the given database.
 * Generalizes the previously hard-coded "test" database; the no-arg overload
 * preserves the old behavior for existing callers.
 *
 * @param database name of the database to switch to; must be a trusted
 *                 identifier — Hive cannot parameterize identifiers, so the
 *                 name is interpolated into the statement directly
 */
public static void useDatabase(String database) {
try {
conn(); // ensure conn/stmt are initialized; re-entrant call returns early once set
String sql = String.format("use %s", database);
stmt.execute(sql);
} catch (SQLException e) {
e.printStackTrace();
}
}
/**
 * Prints the name of every table in the current database to stdout.
 * Requires {@link #conn()} to have been called first (stmt must be initialized).
 */
public static void showTables() {
String sql = "show tables";
// BUG FIX: the original leaked the ResultSet; try-with-resources closes it.
try (ResultSet rs = stmt.executeQuery(sql)) {
while (rs.next()) {
System.out.println(rs.getString(1));
}
} catch (SQLException e) {
e.printStackTrace();
}
}
/**
 * Executes a CREATE statement against the shared Hive session.
 * The argument is the statement body WITHOUT the leading CREATE keyword,
 * e.g. {@code "TABLE t (id int)"}; the full DDL is echoed to stdout first.
 *
 * @param sql the DDL text following the CREATE keyword
 */
public static void createTable(String sql) {
final String ddl = "CREATE " + sql;
System.out.println(ddl);
try {
stmt.execute(ddl);
} catch (SQLException e) {
e.printStackTrace();
}
}
/**
 * Loads a local file into a Hive table via LOAD DATA LOCAL INPATH.
 *
 * SECURITY NOTE(review): path and table are concatenated into the SQL text;
 * Hive cannot parameterize identifiers or inpath literals, so callers must
 * pass trusted values only.
 *
 * @param path  local filesystem path of the data file
 * @param table target table name
 * @throws SQLException if the load statement fails
 */
public static void loadData(String path, String table) throws Exception {
String sql = "load data local inpath '" + path + "' into table " + table;
System.out.println("Running: " + sql);
try {
stmt.execute(sql);
} catch (SQLException e) {
// BUG FIX: the method declares `throws Exception` yet silently swallowed
// every SQLException — propagate so callers actually see load failures.
e.printStackTrace();
throw e;
}
System.out.println();
}
/**
 * Runs an INSERT ... SELECT from one table into another.
 *
 * @param insertTable  destination table
 * @param selectTable  source table
 * @param selectFields comma-separated column list for the SELECT
 * @param overwrite    true to overwrite the destination, false/null to append
 */
public void insertSelect(String insertTable, String selectTable, String selectFields, Boolean overwrite) {
try {
// BUG FIX: the overwrite flag was inverted (true produced a plain append),
// and Hive's overwrite syntax is "INSERT OVERWRITE TABLE t" — the original
// "insert overwrite into table" is invalid HiveQL.
// Boolean.TRUE.equals also avoids an NPE when overwrite is null (treated as append).
String sql;
if (Boolean.TRUE.equals(overwrite)) {
sql = "insert overwrite table " + insertTable + " select " + selectFields + " from " + selectTable;
} else {
sql = "insert into table " + insertTable + " select " + selectFields + " from " + selectTable;
}
stmt.execute(sql);
} catch (SQLException e) {
e.printStackTrace();
}
}
}
// --- Blog-scrape residue below (original page footer), preserved as comments ---
// hive之java api  ("Hive's Java API")
// 發表評論  ("Post a comment")
// 所有評論  ("All comments")
// 還沒有人評論,想成為第一個評論的人麼? 請在上方評論欄輸入並且點擊發布.
//   ("No comments yet — want to be the first? Type in the comment box above and click publish.")