您的位置:首页 > 其它

Hive 2.x 版本的 API 调用:建表、删除与添加数据

2016-05-26 21:56 423 查看
hive sql 语句其实和 sql 差不多,在 api 调用时也很简单,用 execute() 或 executeQuery() 即可。不过在创建表时,注意指定数据的读入格式,否则就不能插入数据。

代码展示:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

/**
 * Demonstrates basic Hive 2.x operations over JDBC (HiveServer2):
 * create/drop a table, list tables, describe the schema, load data
 * from a local tab-delimited file, and query the loaded rows.
 *
 * <p>All operations share a single connection/statement opened in
 * {@link #main}; the original opened (and leaked) a fresh connection
 * in every helper method.
 */
public class HiveTestApi001 {

    /** HiveServer2 JDBC driver class (Hive 2.x). */
    private static final String DRIVER_NAME = "org.apache.hive.jdbc.HiveDriver";
    /** HiveServer2 endpoint and default database. */
    private static final String URL = "jdbc:hive2://master:10000/default";
    private static final String USER = "hive";
    private static final String PASSWORD = "hive";
    /** Demo table used by every step below. */
    private static final String TABLE_NAME = "hellohive";

    public static void main(String[] args) {
        // One shared connection/statement for every step; try-with-resources
        // guarantees both are closed even on failure (the original never
        // closed any Connection, Statement, or ResultSet).
        try (Connection conn = getConn();
             Statement stmt = conn.createStatement()) {
            System.out.println(conn);
            createtable(stmt);             // (re)create the hellohive table
            selectalltable(stmt);          // list tables matching the name
            desctable(stmt);               // show the table schema
            loadData(stmt, TABLE_NAME);    // load rows from a local file
            selecttable(stmt, TABLE_NAME); // query the loaded rows
        } catch (ClassNotFoundException | SQLException e) {
            // Single failure boundary: any step failing aborts the sequence
            // instead of letting later steps run against a broken state.
            e.printStackTrace();
        }
    }

    /**
     * Opens a JDBC connection to HiveServer2.
     *
     * @throws ClassNotFoundException if the Hive driver is not on the classpath
     * @throws SQLException if the connection cannot be established
     */
    private static Connection getConn() throws ClassNotFoundException, SQLException {
        Class.forName(DRIVER_NAME);
        return DriverManager.getConnection(URL, USER, PASSWORD);
    }

    /** Drops the demo table if present, then recreates it tab-delimited. */
    private static void createtable(Statement stmt) throws SQLException {
        stmt.execute("drop table if exists " + TABLE_NAME);
        // "row format delimited ... '\t'" is required so the later
        // "load data" step can parse the tab-separated input file.
        stmt.execute("create table " + TABLE_NAME
                + " (key int, value string)  row format delimited fields terminated by '\t'");
        System.out.println("Create table success!");
    }

    /** Prints the names of tables matching the demo table name. */
    private static void selectalltable(Statement stmt) throws SQLException {
        String sql = "show tables  '" + TABLE_NAME + "'";
        try (ResultSet res = stmt.executeQuery(sql)) {
            while (res.next()) {
                System.out.println(res.getString(1));
            }
        }
    }

    /** Prints each column name and type of the demo table. */
    private static void desctable(Statement stmt) throws SQLException {
        String sql = "describe " + TABLE_NAME;
        System.out.println("Running: " + sql);
        try (ResultSet res = stmt.executeQuery(sql)) {
            while (res.next()) {
                System.out.println(res.getString(1) + "\t" + res.getString(2));
            }
        }
    }

    /**
     * Loads tab-delimited rows from a local file into the given table.
     *
     * <p>NOTE(review): tableName is concatenated into the SQL; acceptable
     * here because the only caller passes a hard-coded constant, but never
     * pass untrusted input through this method.
     */
    private static void loadData(Statement stmt, String tableName) throws SQLException {
        String filepath = "/home/hadoop/user.txt";
        String sql = "load data local inpath '" + filepath + "' into table "
                + tableName;
        System.out.println("Running:" + sql);
        stmt.execute(sql);
    }

    /** Prints every (key, value) row of the given table. */
    private static void selecttable(Statement stmt, String tableName) throws SQLException {
        String sql = "select * from " + tableName;
        try (ResultSet res = stmt.executeQuery(sql)) {
            while (res.next()) {
                System.out.println(res.getInt(1) + "\t" + res.getString(2));
            }
        }
    }
}
内容来自用户分享和网络整理,不保证内容的准确性,如有侵权内容,可联系管理员处理 点击这里给我发消息
标签:  hive api 数据