【Java】
Classic Hive JDBC examples
Author: / Published 2017/2/6 / 594
package com.bailiangroup.bigdata.utils;

import java.io.File;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

import org.apache.log4j.Logger;

public class HiveJdbcCliUtil {

    /**
     * Hive Java API example.
     *
     * Start the Hive remote service first, from the command line:
     * hive --service hiveserver >/dev/null 2>/dev/null &
     */
    String driverName;
    String url;
    String username;
    String password;
    String sql = "";
    ResultSet res = null;
    Connection conn = null;
    Statement stmt = null;

    static final Logger log = Logger.getLogger(HiveJdbcCliUtil.class);

    public HiveJdbcCliUtil(String driverName, String url, String username, String password) {
        this.driverName = driverName;
        this.url = url;
        this.username = username;
        this.password = password;
    }

    public void init() {
        try {
            Class.forName(driverName);
            if (conn == null) {
                conn = DriverManager.getConnection(url, username, password);
            }
            if (stmt == null) {
                stmt = conn.createStatement();
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Recursively walk a directory and load each exported file into its
     * matching Hive table, partitioned by the date embedded in the file name.
     */
    public void showAllFiles(File dir) throws Exception {
        File[] fs = dir.listFiles();
        // Filename keyword -> target Hive table
        String[][] mappings = {
            {"KF_CASE_NORMAL",       "sourcedata.s17_kf_case_normal"},
            {"KF_CASE_POSTIL",       "sourcedata.s17_kf_case_postil"},
            {"KF_CASE_TRANSFER_HIS", "sourcedata.s17_kf_case_transfer_his"},
            {"KF_CUSTOMER_INFO",     "sourcedata.s17_kf_customer_info"},
            {"KF_MAIN_CASES",        "sourcedata.s17_kf_main_cases"}
        };
        for (int i = 0; i < fs.length; i++) {
            String name = fs[i].getName();
            String date = null;
            if (name.length() > 14) {
                // The last 14 characters of the file name carry a timestamp;
                // its first 10 characters are the partition date.
                String substring = name.substring(name.length() - 14, name.length());
                date = substring.substring(0, 10);
            }
            for (String[] mapping : mappings) {
                if (name.contains(mapping[0])) {
                    System.out.println("------>" + fs[i].getAbsolutePath());
                    sql = "load data local inpath '" + fs[i].getAbsolutePath()
                            + "' overwrite into table " + mapping[1]
                            + " partition(dt ='" + date + "')";
                    log.info("Running:" + sql);
                    System.out.println("sql===" + sql);
                    stmt.execute(sql);
                }
            }
            if (fs[i].isDirectory()) {
                try {
                    showAllFiles(fs[i]);
                } catch (Exception e) {
                    // ignore and keep walking
                }
            }
        }
    }

    public static Connection getConn(String driverName, String url, String username, String password) {
        Connection conn = null;
        try {
            Class.forName(driverName);
            conn = DriverManager.getConnection(url, username, password);
        } catch (Exception e) {
            e.printStackTrace();
        }
        return conn;
    }

    public void selectData(Statement stmt, String tableName) throws SQLException {
        sql = "select * from " + tableName;
        System.out.println("Running:" + sql);
        res = stmt.executeQuery(sql);
        System.out.println("Result of select * query:");
        while (res.next()) {
            System.out.println(res.getString(1) + "\t" + res.getString(2));
            log.info(res.getString(1) + "\t" + res.getString(2));
        }
    }

    public static void main(String[] args) throws Exception {
        String driverName = "org.apache.hive.jdbc.HiveDriver";
        String url = "jdbc:hive2://10.201.129.78:10000/default";
        String username = "";
        String password = "";
        HiveJdbcCliUtil hiveJdbcCliUtil = new HiveJdbcCliUtil(driverName, url, username, password);
        hiveJdbcCliUtil.init();
        // hiveJdbcCliUtil.selectData(hiveJdbcCliUtil.stmt, "test_hive");
        String filepath = "E:\\ftp\\kefu2\\";
        File dir = new File(filepath);
        try {
            hiveJdbcCliUtil.showAllFiles(dir);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    public String getDriverName() { return driverName; }
    public void setDriverName(String driverName) { this.driverName = driverName; }
    public String getUrl() { return url; }
    public void setUrl(String url) { this.url = url; }
    public String getUsername() { return username; }
    public void setUsername(String username) { this.username = username; }
    public String getPassword() { return password; }
    public void setPassword(String password) { this.password = password; }
}

// Separate file: HiveJdbcCli.java -- a second, standalone example using the
// older HiveServer1 driver. It needs the same java.sql.* and
// org.apache.log4j.Logger imports as above.
public class HiveJdbcCli {

    private static String driverName = "org.apache.hadoop.hive.jdbc.HiveDriver";
    private static String url = "jdbc:hive://hadoop3:10000/default";
    private static String user = "hive";
    private static String password = "mysql";
    private static String sql = "";
    private static ResultSet res;
    private static final Logger log = Logger.getLogger(HiveJdbcCli.class);

    public static void main(String[] args) {
        Connection conn = null;
        Statement stmt = null;
        try {
            conn = getConn();
            stmt = conn.createStatement();

            // Step 1: drop the table if it already exists
            String tableName = dropTable(stmt);
            // Step 2: create the table
            createTable(stmt, tableName);
            // Step 3: show the created table
            showTables(stmt, tableName);
            // describe table
            describeTables(stmt, tableName);
            // load data into table
            loadData(stmt, tableName);
            // select * query
            selectData(stmt, tableName);
            // regular hive query (count)
            countData(stmt, tableName);
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
            log.error(driverName + " not found!", e);
            System.exit(1);
        } catch (SQLException e) {
            e.printStackTrace();
            log.error("Connection error!", e);
            System.exit(1);
        } finally {
            try {
                if (stmt != null) {
                    stmt.close();
                    stmt = null;
                }
                if (conn != null) {
                    conn.close();
                    conn = null;
                }
            } catch (SQLException e) {
                e.printStackTrace();
            }
        }
    }

    private static void countData(Statement stmt, String tableName) throws SQLException {
        sql = "select count(1) from " + tableName;
        System.out.println("Running:" + sql);
        res = stmt.executeQuery(sql);
        System.out.println("Result of regular hive query:");
        while (res.next()) {
            System.out.println("count ------>" + res.getString(1));
        }
    }

    private static void selectData(Statement stmt, String tableName) throws SQLException {
        sql = "select * from " + tableName;
        System.out.println("Running:" + sql);
        res = stmt.executeQuery(sql);
        System.out.println("Result of select * query:");
        while (res.next()) {
            System.out.println(res.getInt(1) + "\t" + res.getString(2));
        }
    }

    private static void loadData(Statement stmt, String tableName) throws SQLException {
        String filepath = "/home/hadoop01/data";
        sql = "load data local inpath '" + filepath + "' into table " + tableName;
        System.out.println("Running:" + sql);
        // LOAD DATA returns no result set, so use execute() rather than executeQuery()
        stmt.execute(sql);
    }

    private static void describeTables(Statement stmt, String tableName) throws SQLException {
        sql = "describe " + tableName;
        System.out.println("Running:" + sql);
        res = stmt.executeQuery(sql);
        System.out.println("Result of describe table:");
        while (res.next()) {
            System.out.println(res.getString(1) + "\t" + res.getString(2));
        }
    }

    private static void showTables(Statement stmt, String tableName) throws SQLException {
        sql = "show tables '" + tableName + "'";
        System.out.println("Running:" + sql);
        res = stmt.executeQuery(sql);
        System.out.println("Result of show tables:");
        if (res.next()) {
            System.out.println(res.getString(1));
        }
    }

    private static void createTable(Statement stmt, String tableName) throws SQLException {
        sql = "create table " + tableName
                + " (key int, value string) row format delimited fields terminated by '\t'";
        // DDL returns no result set, so use execute()
        stmt.execute(sql);
    }

    private static String dropTable(Statement stmt) throws SQLException {
        // Name of the demo table
        String tableName = "testHive";
        sql = "drop table " + tableName;
        stmt.execute(sql);
        return tableName;
    }

    private static Connection getConn() throws ClassNotFoundException, SQLException {
        Class.forName(driverName);
        return DriverManager.getConnection(url, user, password);
    }
}
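For a quick connectivity check against HiveServer2, the same idea can be trimmed down to the minimal sketch below. This is not part of the original example: the endpoint, the empty credentials, the class name HiveQuickCheck, and the "show tables" statement are placeholders to adjust for your cluster, and the hive-jdbc driver jar must be on the classpath.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class HiveQuickCheck {
    public static void main(String[] args) throws Exception {
        // Placeholder HiveServer2 endpoint and empty credentials: adjust to your cluster.
        String url = "jdbc:hive2://localhost:10000/default";
        // Explicit driver registration; recent hive-jdbc jars also auto-register the driver.
        Class.forName("org.apache.hive.jdbc.HiveDriver");
        // try-with-resources closes the ResultSet, Statement and Connection even if the query fails.
        try (Connection conn = DriverManager.getConnection(url, "", "");
             Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery("show tables")) {
            while (rs.next()) {
                System.out.println(rs.getString(1));
            }
        }
    }
}

Note also that the load data ... partition(dt = ...) statements in showAllFiles assume the target sourcedata.s17_* tables were created with a partitioned by (dt string) clause; Hive rejects the partition clause otherwise.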