Important: when developing a Hive program over JDBC, you must first start Hive's remote service interface. Start it with the following command: hive --service hiveserver &
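Before running the full client below, a quick connectivity check can confirm that the service is listening. This is only a sketch: it assumes the same HiveServer1 endpoint, driver class, and credentials used in the program that follows, so adjust them to your environment.

import java.sql.Connection;
import java.sql.DriverManager;

public class HiveConnectionCheck {
    public static void main(String[] args) throws Exception {
        // HiveServer1-era driver class; hive-jdbc and its dependencies must be on the classpath
        Class.forName("org.apache.hadoop.hive.jdbc.HiveDriver");
        // Endpoint, user, and password taken from the client program below (assumed values)
        Connection conn = DriverManager.getConnection(
                "jdbc:hive://192.168.11.157:10000/default", "hive", "mysql");
        System.out.println("Connected to HiveServer: " + !conn.isClosed());
        conn.close();
    }
}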
1). Test data
Contents of userinfo.txt (fields on each line are separated by tabs):
1	xiapi
2	xiaoxue
3	qingqing
2). Program code
package com.ljq.hive;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

import org.apache.log4j.Logger;

public class HiveJdbcClient {
    private static String driverName = "org.apache.hadoop.hive.jdbc.HiveDriver";
    private static String url = "jdbc:hive://192.168.11.157:10000/default";
    private static String user = "hive";
    private static String password = "mysql";
    private static String sql = "";
    private static ResultSet res;
    private static final Logger log = Logger.getLogger(HiveJdbcClient.class);

    public static void main(String[] args) {
        try {
            Class.forName(driverName);
            Connection conn = DriverManager.getConnection(url, user, password);
            Statement stmt = conn.createStatement();
            // Name of the table to create
            String tableName = "testHiveDriverTable";
            /** Step 1: drop the table if it already exists **/
            sql = "drop table " + tableName;
            stmt.executeQuery(sql);
            /** Step 2: create the table if it does not exist **/
            sql = "create table " + tableName + " (key int, value string) row format delimited fields terminated by '\t'";
            stmt.executeQuery(sql);
            // Run "show tables"
            sql = "show tables '" + tableName + "'";
            System.out.println("Running: " + sql);
            res = stmt.executeQuery(sql);
            System.out.println("Result of \"show tables\":");
            if (res.next()) {
                System.out.println(res.getString(1));
            }
            // Run "describe table"
            sql = "describe " + tableName;
            System.out.println("Running: " + sql);
            res = stmt.executeQuery(sql);
            System.out.println("Result of \"describe table\":");
            while (res.next()) {
                System.out.println(res.getString(1) + "\t" + res.getString(2));
            }
            // Run "load data into table"
            String filepath = "/home/hadoop/ziliao/userinfo.txt";
            sql = "load data local inpath '" + filepath + "' into table " + tableName;
            System.out.println("Running: " + sql);
            res = stmt.executeQuery(sql);
            // Run "select * query"
            sql = "select * from " + tableName;
            System.out.println("Running: " + sql);
            res = stmt.executeQuery(sql);
            System.out.println("Result of \"select * query\":");
            while (res.next()) {
                System.out.println(res.getInt(1) + "\t" + res.getString(2));
            }
            // Run "regular hive query"
            sql = "select count(1) from " + tableName;
            System.out.println("Running: " + sql);
            res = stmt.executeQuery(sql);
            System.out.println("Result of \"regular hive query\":");
            while (res.next()) {
                System.out.println(res.getString(1));
            }
            conn.close();
            conn = null;
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
            log.error(driverName + " not found!", e);
            System.exit(1);
        } catch (SQLException e) {
            e.printStackTrace();
            log.error("Connection error!", e);
            System.exit(1);
        }
    }
}
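The program above leaves the Statement and ResultSet open and issues DDL through executeQuery(). As a variant (a sketch, not part of the original example; it assumes Java 7+ for try-with-resources and that the HiveServer1 driver accepts non-query statements via execute() — if not, executeQuery() as in the original works as well), the resources can be closed automatically:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class HiveJdbcClientTryWithResources {
    public static void main(String[] args) throws Exception {
        // Same driver, endpoint, and credentials as the program above (assumed values)
        Class.forName("org.apache.hadoop.hive.jdbc.HiveDriver");
        String tableName = "testHiveDriverTable";
        String filepath = "/home/hadoop/ziliao/userinfo.txt";
        // try-with-resources closes the Connection, Statement, and ResultSet automatically
        try (Connection conn = DriverManager.getConnection(
                     "jdbc:hive://192.168.11.157:10000/default", "hive", "mysql");
             Statement stmt = conn.createStatement()) {
            // DDL and load statements go through execute() rather than executeQuery()
            stmt.execute("drop table if exists " + tableName);
            stmt.execute("create table " + tableName
                    + " (key int, value string) row format delimited fields terminated by '\t'");
            stmt.execute("load data local inpath '" + filepath + "' into table " + tableName);
            try (ResultSet res = stmt.executeQuery("select * from " + tableName)) {
                while (res.next()) {
                    System.out.println(res.getInt(1) + "\t" + res.getString(2));
                }
            }
        }
    }
}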
3). Results (right-click --> Run as --> Run on Hadoop)
Running: show tables 'testHiveDriverTable'
Result of "show tables":
testhivedrivertable
Running: describe testHiveDriverTable
Result of "describe table":
key	int
value	string
Running: load data local inpath '/home/hadoop/ziliao/userinfo.txt' into table testHiveDriverTable
Running: select * from testHiveDriverTable
Result of "select * query":
1	xiapi
2	xiaoxue
3	qingqing
Running: select count(1) from testHiveDriverTable
Result of "regular hive query":
3