1. Add the following to Hadoop's etc/hadoop/core-site.xml so that the user running HiveServer2 (here qiqi) is allowed to impersonate other users:
<property>
  <name>hadoop.proxyuser.qiqi.hosts</name>
  <value>master node address</value>
</property>
<property>
  <name>hadoop.proxyuser.qiqi.groups</name>
  <value>*</value>
</property>
Restart Hadoop after making the change.
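The restart can be done with the standard scripts under Hadoop's sbin directory, for example:
./sbin/stop-dfs.sh && ./sbin/start-dfs.sh
./sbin/stop-yarn.sh && ./sbin/start-yarn.sh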
Test (once HiveServer2 is running, see step 3): ./beeline -u 'jdbc:hive2://localhost:10000/userdb' -n doutao
2. Edit hive/conf/hive-site.xml and set the HiveServer2 Thrift port and bind host:
<configuration>
  <property>
    <name>hive.server2.thrift.port</name>
    <value>10000</value>
  </property>
  <property>
    <name>hive.server2.thrift.bind.host</name>
    <value>192.168.33.128</value>
  </property>
</configuration>
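If the JDBC connection is later refused for authentication reasons, it can help to state the authentication mode explicitly (NONE is the simple, no-auth mode); an optional property for the same <configuration> block:
<property>
  <name>hive.server2.authentication</name>
  <value>NONE</value>
</property>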
3. Start the HiveServer2 service:
hive --service hiveserver2
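This command runs HiveServer2 in the foreground. To keep it running in the background and confirm that it is listening on port 10000, something like the following can be used (the log file name is just an example):
nohup hive --service hiveserver2 > hiveserver2.log 2>&1 &
netstat -nltp | grep 10000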
4. Open Eclipse, create a new project for Hive, and add the jar files under hive/lib to the build path.
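If the project is built with Maven instead of importing jars by hand, roughly the following dependencies cover the JDBC client (the versions are placeholders and should match the installed Hive and Hadoop):
<dependency>
  <groupId>org.apache.hive</groupId>
  <artifactId>hive-jdbc</artifactId>
  <version>YOUR_HIVE_VERSION</version>
</dependency>
<dependency>
  <groupId>org.apache.hadoop</groupId>
  <artifactId>hadoop-common</artifactId>
  <version>YOUR_HADOOP_VERSION</version>
</dependency>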
5. Wrap the JDBC access in a small helper class and test it:
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class Hive {
    Connection con = null;
    Statement statement = null;
    ResultSet res = null;

    // Load the Hive JDBC driver and open a connection to HiveServer2.
    // Returns true on success, false if the driver class cannot be found.
    public boolean openConnection() throws SQLException {
        try {
            Class.forName("org.apache.hive.jdbc.HiveDriver");
            con = DriverManager.getConnection(
                    "jdbc:hive2://192.168.147.140:10000/default", "qiqi", "123456");
            return true;
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
        }
        return false;
    }

    // select
    public ResultSet selectAll(String sql) throws SQLException {
        statement = con.createStatement();
        res = statement.executeQuery(sql);
        return res;
    }

    // insert / update / delete / DDL
    public boolean updateAll(String sql) throws SQLException {
        statement = con.createStatement();
        return statement.execute(sql);
    }

    // close the result set, statement and connection
    public void closeAll() {
        try {
            if (res != null) res.close();
            if (statement != null) statement.close();
            if (con != null) con.close();
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }
}
import java.sql.ResultSet;

public class Test {
    public static void main(String[] args) throws Exception {
        Hive hive = new Hive();
        //String dropSql = "drop table kissli";
        //String createSql = "CREATE TABLE gool(name String,age int)";
        //String createSql = "CREATE TABLE kiss(key string,xm string,nl int) STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler' WITH SERDEPROPERTIES ('hbase.columns.mapping' = ':key,hu:xm,huxie:nl') TBLPROPERTIES ('hbase.table.name' = 'kissli1')";
        //String insertSql = "insert into gool values('wangzhang',89)";
        String querySql = "select * from gool";
        //String sql = "drop table fengzi";

        // Open the connection and stop if it fails.
        if (!hive.openConnection()) {
            System.out.println("failed to open connection");
            return;
        }
        //System.out.print(hive.updateAll(insertSql));
        //hive.updateAll(createSql);

        // Run the query and print each row (name, age).
        ResultSet rs = hive.selectAll(querySql);
        while (rs.next()) {
            System.out.println(rs.getString(1));
            System.out.println(rs.getInt(2));
        }
        hive.closeAll();
    }
}
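On Java 7 or later, the same query can also be written with try-with-resources so that the ResultSet, Statement, and Connection are always closed; a minimal sketch using the same connection string and table as above:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class TryWithResourcesTest {
    public static void main(String[] args) throws Exception {
        Class.forName("org.apache.hive.jdbc.HiveDriver");
        try (Connection con = DriverManager.getConnection(
                     "jdbc:hive2://192.168.147.140:10000/default", "qiqi", "123456");
             Statement stmt = con.createStatement();
             ResultSet rs = stmt.executeQuery("select * from gool")) {
            while (rs.next()) {
                System.out.println(rs.getString(1) + "\t" + rs.getInt(2));
            }
        } // all three resources are closed automatically here
    }
}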