Calling a shell script from Java to run Sqoop and export Hive table data to MySQL

1: Create the shell script

touch sqoop_options.sh
chmod 777 sqoop_options.sh


Edit the file. The number of map tasks is deliberately made a variable, both to test passing a parameter in from Java and to verify that Sqoop's --options-file supports mixing in extra command-line arguments this way (a short Java sketch of how that argument gets supplied follows the script).

#!/bin/bash
# Run the Sqoop job defined in the options file; $1 supplies --num-mappers
/opt/cdh-5.3.6/sqoop-1.4.5-cdh5.3.6/bin/sqoop --options-file /opt/cdh-5.3.6/sqoop-1.4.5-cdh5.3.6/sqoop-import-mysql.txt --num-mappers $1
if [ $? -eq 0 ]; then
    echo "success"
else
    echo "error"
fi
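
To make the parameter passing concrete before step 3, here is a minimal Java sketch (the class name and the value 2 are purely illustrative, not part of the original code) showing that the mapper count is simply appended to the script path and arrives inside the script as $1:

// Minimal sketch, assuming the same script path as above: the mapper count is
// appended as the script's first positional argument, which the script reads as $1.
public class SqoopCommandPreview {
    public static void main(String[] args) {
        int numMappers = 2; // illustrative value; step 3 below passes 1
        String command = "/opt/cdh-5.3.6/sqoop-1.4.5-cdh5.3.6/sqoop_options.sh " + numMappers;
        System.out.println(command); // prints the script path followed by " 2"
    }
}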

2: Create and edit the sqoop-import-mysql.txt file

touch sqoop-import-mysql.txt

File contents:

export
--connect
jdbc:mysql://172.16.71.27:3306/babasport
--username
root
--password
root
--table
test_hive
--export-dir
/user/hive/warehouse/hive_bbs_product_snappy
--input-fields-terminated-by
'\t'

The Hive table data is stored in HDFS under /user/hive/warehouse/hive_bbs_product_snappy, which is the --export-dir given above.


3: Write the Java backend code. For now it only covers the case where the Java program runs on Windows and reaches the cluster over SSH; calling the shell script directly on Linux will be added later (a minimal sketch of that local call follows the two classes below).

package com.liveyc.common.utils;

import java.util.Properties;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

public class FileToHbase {

    /**
     * Exit code that marks a successful shell script run
     */
    public static int SHELL_EXIT_OK = 0;
    public static Log log = LogFactory.getLog(FileToHbase.class);
    public static String connIp = "172.16.71.120";
    public static String connUser = "root";
    public static String connPwd = "123456";

    public static void main(String[] args) throws Exception {
        boolean result = export();
        System.out.println(result);
    }

    public static boolean export() throws Exception {
        boolean result = false;
        // When running on Windows, connect to the remote host over SSH first
        if (isWinSystem()) {
            ConnectShell connectShell = new ConnectShell(connIp, connUser, connPwd, "utf-8");
            // Script path followed by the mapper count (here 1) as its first argument
            String url = "/opt/cdh-5.3.6/sqoop-1.4.5-cdh5.3.6/sqoop_options.sh" + " " + 1;
            result = connectShell.excuteShellCommand(url);
        }
        return result;
    }

    /**
     * Detect the current operating system type
     *
     * @return true for Windows, false for Linux
     */
    public static boolean isWinSystem() {
        // Read the OS name from the system properties
        Properties prop = System.getProperties();
        String os = prop.getProperty("os.name");
        if (os.startsWith("win") || os.startsWith("Win")) {
            return true;
        } else {
            return false;
        }
    }
}
The ConnectShell helper class used above:

package com.liveyc.common.utils;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import java.nio.charset.Charset;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import ch.ethz.ssh2.ChannelCondition;
import ch.ethz.ssh2.Connection;
import ch.ethz.ssh2.Session;
import ch.ethz.ssh2.StreamGobbler;

/**
 * ConnectShell
 *
 * @Description: connects to the server that hosts the shell script
 * @author: aitf
 * @date: March 31, 2016
 */
public class ConnectShell {

    private Connection conn;
    private String ipAddr;
    private String userName;
    private String password;
    private String charset = Charset.defaultCharset().toString();
    private static final int TIME_OUT = 1000 * 5 * 60;
    public static Log log = LogFactory.getLog(ConnectShell.class);

    public ConnectShell(String ipAddr, String userName, String password, String charset) {
        this.ipAddr = ipAddr;
        this.userName = userName;
        this.password = password;
        if (charset != null) {
            this.charset = charset;
        }
    }

    public boolean login() throws IOException {
        conn = new Connection(ipAddr);
        conn.connect();
        return conn.authenticateWithPassword(userName, password); // authenticate
    }

    /**
     * @Title: excuteShellCommand
     * @Description: execute a shell script command over SSH
     * @param shellpath
     * @return true if the script output contains "success"
     */
    public boolean excuteShellCommand(String shellpath) {
        InputStream in = null;
        boolean result = false;
        String str = "";
        try {
            if (this.login()) {
                Session session = conn.openSession();
                // session.execCommand("cd /root");
                session.execCommand(shellpath);
                in = new StreamGobbler(session.getStdout());
                // in = session.getStdout();
                str = this.processStdout(in, charset);
                session.waitForCondition(ChannelCondition.EXIT_STATUS, TIME_OUT);
                session.close();
                conn.close();
                if (str.contains("success")) {
                    result = true;
                } else {
                    result = false;
                }
            }
        } catch (IOException e1) {
            e1.printStackTrace();
        }
        return result;
    }

    public String excuteShellCommand2(String shellpath) throws Exception {
        InputStream in = null;
        String result = "";
        try {
            if (this.login()) {
                Process exec = Runtime.getRuntime().exec(shellpath); // e.g. ipconfig
                in = exec.getInputStream();
                result = this.processStdout(in, this.charset);
            }
        } catch (IOException e1) {
            e1.printStackTrace();
        }
        return result;
    }

    /**
     * Convert the command output stream to a String
     *
     * @param in
     * @param charset
     * @return
     * @throws UnsupportedEncodingException
     */
    public String processStdout(InputStream in, String charset) throws UnsupportedEncodingException {
        String line = null;
        BufferedReader brs = new BufferedReader(new InputStreamReader(in, charset));
        StringBuffer sb = new StringBuffer();
        try {
            while ((line = brs.readLine()) != null) {
                sb.append(line + "\n");
            }
        } catch (IOException e) {
            log.error("--- exception while reading the command output ---");
        }
        return sb.toString();
    }
}
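
The Linux case is left for later in the article; as a rough sketch only (the class name LocalShellRunner is an assumption, not part of the original code), the same script could be started locally with ProcessBuilder when the Java process already runs on the cluster node, reusing the script's "success" marker:

package com.liveyc.common.utils;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;

/**
 * Minimal sketch of the promised Linux branch: run sqoop_options.sh locally
 * instead of over SSH. Class name and hard-coded path are illustrative.
 */
public class LocalShellRunner {

    public static boolean exportLocally(int numMappers) throws IOException, InterruptedException {
        // Same script and same positional argument as the SSH variant above
        ProcessBuilder pb = new ProcessBuilder(
                "/opt/cdh-5.3.6/sqoop-1.4.5-cdh5.3.6/sqoop_options.sh",
                String.valueOf(numMappers));
        pb.redirectErrorStream(true); // merge stderr into stdout

        Process process = pb.start();
        StringBuilder output = new StringBuilder();
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(process.getInputStream(), "utf-8"))) {
            String line;
            while ((line = reader.readLine()) != null) {
                output.append(line).append("\n");
            }
        }
        int exitCode = process.waitFor();
        // The script echoes "success" when sqoop exits with 0, same as the SSH path
        return exitCode == 0 && output.toString().contains("success");
    }

    public static void main(String[] args) throws Exception {
        System.out.println(exportLocally(1));
    }
}

A natural place to call something like this would be the else branch of FileToHbase.export(), i.e. when isWinSystem() returns false.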

4: Start testing

Create a table in MySQL; the Hive data columns are int, int, String:

CREATE TABLE test_hive (
    id INT,
    brand_id INT,
    NAME VARCHAR(200)
)


Run the Java main method to start the test.

Watch port 8088 (the YARN ResourceManager UI) to check on the MapReduce job; it is running (nice).


Execution finished.

Only one map task ran (the default is 4), so the parameter from the shell script in step 1 was indeed passed through, and Sqoop's --options-file does support appending extra arguments directly on the command line like this.


Now back to the Java side.

Return value:

 Warning: /opt/cdh-5.3.6/sqoop-1.4.5-cdh5.3.6/bin/../../hbase does not exist! HBase imports will fail.
Please set $HBASE_HOME to the root of your HBase installation.
Warning: /opt/cdh-5.3.6/sqoop-1.4.5-cdh5.3.6/bin/../../hcatalog does not exist! HCatalog jobs will fail.
Please set $HCAT_HOME to the root of your HCatalog installation.
Warning: /opt/cdh-5.3.6/sqoop-1.4.5-cdh5.3.6/bin/../../accumulo does not exist! Accumulo imports will fail.
Please set $ACCUMULO_HOME to the root of your Accumulo installation.
Warning: /opt/cdh-5.3.6/sqoop-1.4.5-cdh5.3.6/bin/../../zookeeper does not exist! Accumulo imports will fail.
Please set $ZOOKEEPER_HOME to the root of your Zookeeper installation.
success


The output contains "success", which means the shell script ran to completion; this is exactly the marker string that excuteShellCommand checks for.


Everything ran fine; now check whether the MySQL table actually contains the data.


OK, everything works as expected. Wiring a local Linux invocation of the shell script into the code (along the lines of the sketch above) is left for later.
