1. Hive exposes a JDBC interface; development requires importing the following jars:
commons-logging-1.0.4.jar
hadoop-common-2.6.0.jar
hive_exec.jar
hive_jdbc.jar
hive_metastore.jar
hive_service.jar
httpclient-4.2.5.jar
httpcore-4.2.5.jar
libfb303.jar
log4j-1.2.16.jar
slf4j-api-1.7.5.jar
slf4j-log4j12-1.7.5.jar
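The sample program below reads its connection settings from a config.properties file on the classpath. That file is not shown in the original; a minimal sketch, assuming the classic HiveServer1 driver implied by hive_service.jar and libfb303.jar above (host, port, and credentials are placeholders to replace with your own):
driverName=org.apache.hadoop.hive.jdbc.HiveDriver
jdbcUrl=jdbc:hive://localhost:10000/default
userName=
password=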
2. A sample program developed against it is shown below:
package com.gw;
import java.io.InputStream;
import java.net.URL;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Properties;
import org.apache.log4j.Logger;
/**
 * @author wangweifeng
 * @description: Connects to Hive over JDBC and performs the following operations: create a table, drop a table, describe a table, load data from the local file system, and run queries.
 *
 */
public class HiveJdbcClient {
public static Statement jdbcStatemt;
private static Connection con = null;
private static ResultSet res = null;
private static final Logger log = Logger.getLogger(HiveJdbcClient.class);
// Lazily create a single shared Statement (singleton style)
public static Statement getJdbcStatemt() throws Exception {
if (jdbcStatemt == null) {
Properties prop = new Properties();
URL url = Thread.currentThread().getContextClassLoader()
.getResource("config.properties");
InputStream in = url.openStream();
prop.load(in);
in.close();
String driverName = prop.getProperty("driverName");
String jdbcUrl = prop.getProperty("jdbcUrl");
String userName = prop.getProperty("userName");
String password = prop.getProperty("password");
// 1. Load the JDBC driver class
Class.forName(driverName);
// 2. Connect to the database
con = DriverManager.getConnection(jdbcUrl, userName, password);
// 3. A Statement must be created from the Connection
jdbcStatemt = con.createStatement();
}
return jdbcStatemt;
}
// Close the JDBC resources
public static void closeStatement() throws Exception {
if (jdbcStatemt != null && !jdbcStatemt.isClosed()) {
jdbcStatemt.close();
}
if (con != null && !con.isClosed()) {
con.close();
}
}
public static void main(String[] args) {
try {
jdbcStatemt = getJdbcStatemt();
} catch (Exception e) {
e.printStackTrace();
return; // no usable connection, so stop here
}
String Hql = "";
/* String tableName = "DemiClass"; */
/* String tableName = "DemiDate"; */
/* String tableName = "DemiDept"; */
String tableName = "tXsXsMxFactA";
String rowTerminated = " ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' ";
String lineTerminated = " LINES TERMINATED BY '\\n' ";
// 1. Drop the table
Hql = "DROP TABLE IF EXISTS " + tableName;
try {
jdbcStatemt.execute(Hql);
System.out.println("表删除成功!");
} catch (SQLException e1) {
System.out.println("表删除失败!");
e1.printStackTrace();
}
// 2. Create the table
/*
* Hql = "CREATE EXTERNAL TABLE " + tableName +
* "(clscode STRING,cty_big STRING,cty_sml STRING) " + rowTerminated +
* lineTerminated;
*/
/*
* Hql = "CREATE EXTERNAL TABLE " + tableName +
* "(XSDATE STRING,YEAR STRING,MONTH STRING,DAY STRING) " +
* rowTerminated + lineTerminated;
*/
/*
* Hql = "CREATE EXTERNAL TABLE " + tableName +
* "(DEPCODE STRING,BRANCH_NAM STRING,GROUP_NAME STRING,DEPNAME STRING) "
* + rowTerminated + lineTerminated;
*/
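// Note: these are EXTERNAL tables, so DROP TABLE removes only the Hive metadata; any existing data files at the table location are preserved.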
Hql = "CREATE EXTERNAL TABLE "
+ tableName
+ "(CLSCODE STRING,DEPCODE STRING,XSDATE STRING,JXCOUNT INT,DXCOUNT INT,LXCOUNT INT,JXTOTAL DOUBLE,DXTOTAL DOUBLE,LXTOTAL DOUBLE) "
+ rowTerminated + lineTerminated;
try {
jdbcStatemt.execute(Hql);
System.out.println("表创建成功!");
} catch (SQLException e) {
System.out.println("表创建失败!");
e.printStackTrace();
}
// 3. Run SHOW TABLES (Hive stores table names in lowercase, so match a lowercase pattern)
Hql = " SHOW TABLES '" + tableName.toLowerCase() + "'";
System.out.println("Running:" + Hql);
try {
res = jdbcStatemt.executeQuery(Hql);
if (res.next()) {
System.out.println(res.getString(1));
}
} catch (SQLException e) {
e.printStackTrace();
}
// 4. Run DESCRIBE on the table
Hql = " DESCRIBE " + tableName;
System.out.println("Running:" + Hql);
try {
res = jdbcStatemt.executeQuery(Hql);
System.out.println("执行" + Hql + " 运行结果:");
while (res.next()) {
System.out.println(res.getString(1) + "\t" + res.getString(2));
}
} catch (SQLException e) {
e.printStackTrace();
}
// 5. Load data into the table
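// Note: when LOAD DATA LOCAL INPATH is issued over JDBC, the path is read on the host running the Hive service, not on the client machine.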
/*
 * String filePath = "/home/Hadoop/Desktop/createTable_And_Data/DemiClass.csv";
 * Hql = "LOAD DATA LOCAL INPATH '" + filePath + "' OVERWRITE INTO TABLE DemiClass";
 */
/*
 * String filePath = "/home/Hadoop/Desktop/createTable_And_Data/DemiDate.csv";
 * Hql = "LOAD DATA LOCAL INPATH '" + filePath + "' OVERWRITE INTO TABLE DemiDate";
 */
/*
 * String filePath = "/home/Hadoop/Desktop/createTable_And_Data/DemiDept.csv";
 * Hql = "LOAD DATA LOCAL INPATH '" + filePath + "' OVERWRITE INTO TABLE DemiDept";
 */
String filePath = "/home/Hadoop/Desktop/createTable_And_Data/tXsXsMxFactA.csv";
Hql = "LOAD DATA LOCAL INPATH ‘" + filePath
+ "‘ OVERWRITE INTO TABLE tXsXsMxFactA";
System.out.println("Running:" + Hql);
try {
jdbcStatemt.execute(Hql);
System.out.println("Data loaded successfully!");
} catch (SQLException e) {
System.out.println("Failed to load data!");
e.printStackTrace();
}
// 6. Run a SELECT * query
Hql = "SELECT * FROM " + tableName;
System.out.println("Running:" + Hql);
try {
res = jdbcStatemt.executeQuery(Hql);
System.out.println("执行[select * query]运行结果:");
while (res.next()) {
/*
* System.out.println(res.getInt(1) + "\t" + res.getString(2) +
* "\t" + res.getString(3));
*/
/*
* System.out.println(res.getString(1) + "\t" + res.getString(2)
* + "\t" + res.getString(3) + "\t" + res.getString(4));
*/
System.out.println(res.getString(1) + "\t" + res.getString(2)
+ "\t" + res.getString(3) + "\t" + res.getInt(4) + "\t"
+ res.getInt(5) + "\t" + res.getInt(6) + "\t"
+ res.getDouble(7) + "\t" + res.getDouble(8) + "\t"
+ res.getDouble(9) + "\t");
}
} catch (SQLException e) {
e.printStackTrace();
}
// 7. Join the fact table with the dimension tables, filtered to a date range
Hql = " select dd.XSDATE ,ddp.BRANCH_NAM, dc.CTY_BIG, fac.DXCOUNT, fac.DXTOTAL from tXsXsMxFactA fac join DemiClass dc on (fac.CLSCODE = dc.CLSCODE)"
+ "join DemiDate dd on (fac.XSDATE = dd.XSDATE) join DemiDept ddp on (fac.DEPCODE = ddp.DEPCODE) WHERE dd.XSDATE BETWEEN ‘2004-04-27‘ AND ‘2004-05-20‘";
try {
res = jdbcStatemt.executeQuery(Hql);
while (res.next()) {
System.out.println(res.getString(1) + "\t" + res.getString(2)
+ "\t" + res.getString(3) + "\t" + res.getInt(4) + "\t"
+ res.getDouble(5));
}
} catch (Exception e) {
e.printStackTrace();
}
try {
closeStatement();
} catch (Exception e) {
e.printStackTrace();
}
}
}
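The jar list in section 1 (hive_service.jar, libfb303.jar) matches the classic HiveServer1 Thrift service. For clusters running HiveServer2 instead, the driver class and URL scheme are different; the following is a minimal, self-contained connection sketch under that assumption, with host, port, database, and user as placeholders:
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
public class HiveServer2Client {
public static void main(String[] args) throws Exception {
// HiveServer2 ships a different driver class and uses the jdbc:hive2:// scheme
Class.forName("org.apache.hive.jdbc.HiveDriver");
Connection con = DriverManager.getConnection(
"jdbc:hive2://localhost:10000/default", "hive", "");
Statement stmt = con.createStatement();
// List tables to verify the connection works
ResultSet rs = stmt.executeQuery("SHOW TABLES");
while (rs.next()) {
System.out.println(rs.getString(1));
}
stmt.close();
con.close();
}
}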