1. Run the Hive CLI as a child process and collect its output. The returned rows can then be placed into a Spark RDD, e.g. starting from JavaSparkContext sc = new JavaSparkContext(conf); (a sketch follows the code below).
You can refer to the following code:
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.List;

String sql = "show tables; select * from test_tb limit 10";
// Build the equivalent of running "hive -e <sql>" on the command line
List<String> command = new ArrayList<String>();
command.add("hive");
command.add("-e");
command.add(sql);
List<String> results = new ArrayList<String>();
ProcessBuilder hiveProcessBuilder = new ProcessBuilder(command);
Process hiveProcess = hiveProcessBuilder.start(); // throws IOException
// Read the query output line by line from the child process's stdout
BufferedReader br = new BufferedReader(new InputStreamReader(
        hiveProcess.getInputStream()));
String data = null;
while ((data = br.readLine()) != null) {
    results.add(data);
}
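As mentioned above, the collected rows can then be handed to Spark. The following is a minimal sketch, assuming Spark is on the classpath and that results is the List<String> built by the code above; the app name and local master are illustrative choices:

import java.util.List;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;

SparkConf sparkConf = new SparkConf().setAppName("HiveCliResults").setMaster("local[*]");
JavaSparkContext sc = new JavaSparkContext(sparkConf);
// Distribute the collected Hive output lines as an RDD for further processing
JavaRDD<String> rdd = sc.parallelize(results);
System.out.println("rows: " + rdd.count());
sc.close();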
If your project is a Java project, you will need the Java (JDBC) API that Hive provides, as in the following code:
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import org.apache.log4j.Logger;
/**
 * Hive JDBC API example
 *
 * Start Hive's remote service first by running on the command line:
 * hive --service hiveserver > /dev/null 2>/dev/null &
 *
 * @author 吖大哥
 *
 */
public class HiveJdbcCli {
private static String driverName = "org.apache.hadoop.hive.jdbc.HiveDriver";
private static String url = "jdbc:hive://hadoop3:10000/default";
private static String user = "hive";
private static String password = "mysql";
private static String sql = "";
private static ResultSet res;
private static final Logger log = Logger.getLogger(HiveJdbcCli.class);
public static void main(String[] args) {
Connection conn = null;
Statement stmt = null;
try {
conn = getConn();
stmt = conn.createStatement();
// Step 1: drop the table if it already exists
String tableName = dropTable(stmt);
// Step 2: create the table if it does not exist
createTable(stmt, tableName);
// Step 3: list the created table
showTables(stmt, tableName);
// Run a "describe table" statement
describeTables(stmt, tableName);
// Run a "load data into table" statement
loadData(stmt, tableName);
// Run a "select *" query
selectData(stmt, tableName);
// Run a regular Hive aggregate query (count)
countData(stmt, tableName);
} catch (ClassNotFoundException e) {
e.printStackTrace();
log.error(driverName + " not found!", e);
System.exit(1);
} catch (SQLException e) {
e.printStackTrace();
log.error("Connection error!", e);
System.exit(1);
} finally {
try {
if (conn != null) {
conn.close();
conn = null;
}
if (stmt != null) {
stmt.close();
stmt = null;
}
} catch (SQLException e) {
e.printStackTrace();
}
}
}
private static void countData(Statement stmt, String tableName)
throws SQLException {
sql = "select count(1) from " + tableName;
System.out.println("Running:" + sql);
res = stmt.executeQuery(sql);
System.out.println("执行“regular hive query”运行结果:");
while (res.next()) {
System.out.println("count ------" + res.getString(1));
}
}
private static void selectData(Statement stmt, String tableName)
throws SQLException {
sql = "select * from " + tableName;
System.out.println("Running:" + sql);
res = stmt.executeQuery(sql);
System.out.println("执行 select * query 运行结果:");
while (res.next()) {
System.out.println(res.getInt(1) + "\t" + res.getString(2));
}
}
private static void loadData(Statement stmt, String tableName)
throws SQLException {
String filepath = "/home/hadoop01/data";
sql = "load data local inpath '" + filepath + "' into table "
+ tableName;
System.out.println("Running:" + sql);
stmt.execute(sql); // LOAD DATA does not return a result set, so use execute()
}
private static void describeTables(Statement stmt, String tableName)
throws SQLException {
sql = "describe " + tableName;
System.out.println("Running:" + sql);
res = stmt.executeQuery(sql);
System.out.println("执行 describe table 运行结果:");
while (res.next()) {
System.out.println(res.getString(1) + "\t" + res.getString(2));
}
}
private static void showTables(Statement stmt, String tableName)
throws SQLException {
sql = "show tables '" + tableName + "'";
System.out.println("Running:" + sql);
res = stmt.executeQuery(sql);
System.out.println("执行 show tables 运行结果:");
if (res.next()) {
System.out.println(res.getString(1));
}
}
private static void createTable(Statement stmt, String tableName)
throws SQLException {
sql = "create table "
+ tableName
+ " (key int, value string) row format delimited fields terminated by '\t'";
stmt.execute(sql); // DDL statement, no result set
}
private static String dropTable(Statement stmt) throws SQLException {
// name of the table to create
String tableName = "testHive";
sql = "drop table " + tableName;
stmt.execute(sql); // DDL statement, no result set
return tableName;
}
private static Connection getConn() throws ClassNotFoundException,
SQLException {
Class.forName(driverName);
Connection conn = DriverManager.getConnection(url, user, password);
return conn;
}
}
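A note on the connection parameters above: they target the old HiveServer1 (started with hive --service hiveserver). If your cluster runs HiveServer2 instead, the driver class and URL scheme are different. Here is a minimal sketch for that case, assuming a HiveServer2 instance listening on hadoop3:10000 (host, port, user, and password are illustrative and must match your deployment):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class HiveServer2Cli {
    public static void main(String[] args) throws Exception {
        // HiveServer2 uses a different driver class and the "hive2" URL scheme
        Class.forName("org.apache.hive.jdbc.HiveDriver");
        Connection conn = DriverManager.getConnection(
                "jdbc:hive2://hadoop3:10000/default", "hive", "mysql");
        Statement stmt = conn.createStatement();
        ResultSet res = stmt.executeQuery("show tables");
        while (res.next()) {
            System.out.println(res.getString(1));
        }
        conn.close();
    }
}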
2. The JDBC approach shown above (there are other connection methods as well, such as ODBC). This approach is very common and examples are easy to find online, so I won't belabor it. However, it is not very stable and is often brought down by large data volumes, so it is not recommended for heavy use.
3. Connecting directly through Hive's Driver class, bypassing JDBC entirely. This should presumably be somewhat faster (unverified); I have only tested it in local mode.
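For completeness, here is a rough sketch of option 3. It relies on Hive's internal org.apache.hadoop.hive.ql.Driver class, which is not a public API and changes between Hive versions (this sketch is written against the Hive 1.x API), so treat it as illustrative only. It also requires hive-exec and your hive-site.xml on the classpath, since the Hive engine runs in-process:

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.session.SessionState;

public class HiveDriverLocal {
    public static void main(String[] args) throws Exception {
        HiveConf conf = new HiveConf();
        // A Hive session must be started before the Driver can run commands
        SessionState.start(conf);
        Driver driver = new Driver(conf);
        driver.run("select * from test_tb limit 10");
        // getResults fills the list with the query's output rows
        List<String> rows = new ArrayList<String>();
        driver.getResults(rows);
        for (String row : rows) {
            System.out.println(row);
        }
    }
}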