Building on the earlier study of basic Hive syntax, here is a relatively complete practice example.
Step 1: Create a Maven-based project named hivest.
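If you prefer the command line to an IDE wizard, the same skeleton can be generated with the standard Maven quickstart archetype; a sketch (the groupId and artifactId simply match the pom.xml in step 2):

mvn archetype:generate -DgroupId=com.gongyunit -DartifactId=hivest -DarchetypeArtifactId=maven-archetype-quickstart -DinteractiveMode=false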
Step 2: Configure pom.xml:
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>com.gongyunit</groupId>
    <artifactId>hivest</artifactId>
    <version>0.0.1-SNAPSHOT</version>
    <packaging>jar</packaging>
    <name>hivest</name>
    <url>http://maven.apache.org</url>
    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    </properties>
    <dependencies>
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>3.8.1</version>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.hive</groupId>
            <artifactId>hive-jdbc</artifactId>
            <version>1.2.1</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-common</artifactId>
            <version>2.6.4</version>
        </dependency>
    </dependencies>
</project>
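The hive-jdbc version (1.2.1) is chosen to match the apache-hive-1.2.1-bin installation used later in this example; if your Hive version differs, adjust it accordingly. A quick compile from the project root confirms that the dependencies resolve:

mvn clean compile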
Step 3: Create the package com.gongyunit.hivest.
Step 4: In that package, create the class HiveJDBCAPI. The full code is as follows:
package com.gongyunit.hivest;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class HiveJDBCAPI {

    public static Connection conn = null;
    public static Statement stmt = null;

    static {
        try {
            Class.forName("org.apache.hive.jdbc.HiveDriver");
            // When using this code, replace the IP address below with the
            // actual address of the machine where Hive is running.
            conn = DriverManager.getConnection("jdbc:hive2://127.0.0.1:10000/default", "", "");
            stmt = conn.createStatement();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    // Create a Hive database. The key method is execute, used for Hive DDL
    // statements; executeQuery cannot be used here.
    public static void createDataBase() {
        try {
            stmt.execute("create database hivedb");
            System.out.println("Database created successfully");
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }

    // Drop a Hive database. Again, use execute for DDL, not executeQuery.
    public static void dropDataBase() {
        try {
            stmt.execute("drop database hivedb");
            System.out.println("Database dropped successfully");
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }

    // Create a Hive table. Again, use execute for DDL, not executeQuery.
    public static void createTable(String tableName) {
        try {
            stmt.execute("create table " + tableName + "( "
                    + "name string, "
                    + "des string "
                    + ")"
                    + " ROW FORMAT DELIMITED"
                    + " FIELDS TERMINATED BY '\t'"
                    + " STORED AS TEXTFILE");
            System.out.println("Hive table created successfully");
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }

    // Drop a Hive table. Again, use execute for DDL, not executeQuery.
    public static void dropTable(String tableName) throws SQLException {
        String sql = "drop table " + tableName;
        stmt.execute(sql);
        System.out.println("Hive table dropped successfully");
    }

    // Check whether a specific table exists.
    public static void showTables(String tableName) throws SQLException {
        String sql = "show tables '" + tableName + "'";
        ResultSet res = stmt.executeQuery(sql);
        System.out.println("Result of show tables:");
        if (res.next()) {
            System.out.println(res.getString(1));
        }
    }

    // Show the table's column names and types.
    public static void describeTables(String tableName) throws SQLException {
        String sql = "describe " + tableName;
        ResultSet res = stmt.executeQuery(sql);
        System.out.println("Result of describe table:");
        while (res.next()) {
            System.out.println(res.getString(1) + "\t" + res.getString(2));
        }
    }

    // Load data into Hive from a local file.
    public static void loadData(String tableName, String filepath) throws SQLException {
        // The path is local to the machine where Hive is installed; in my
        // case the file sits under the home directory of the Hive VM.
        String sql = "load data local inpath '" + filepath + "' into table " + tableName;
        stmt.execute(sql);
        System.out.println("Data loaded successfully");
    }

    // Query the rows in the table.
    public static void selectData(String tableName) throws SQLException {
        String sql = "select * from " + tableName;
        ResultSet res = stmt.executeQuery(sql);
        while (res.next()) {
            System.out.println(res.getString(1) + "\t" + res.getString(2));
        }
    }

    // Count the records in the table with Hive's aggregate function.
    public static void countData(String tableName) throws SQLException {
        String sql = "select count(*) from " + tableName;
        ResultSet res = stmt.executeQuery(sql);
        while (res.next()) {
            System.out.println("Number of records in the table: " + res.getString(1));
        }
    }

    // Query the table with a WHERE condition.
    public static void selectWhere(String tableName) throws SQLException {
        String sql = "select * FROM " + tableName + " WHERE name = 'mike2'";
        ResultSet res = stmt.executeQuery(sql);
        while (res.next()) {
            System.out.println(res.getString(1) + " " + res.getString(2));
        }
    }

    public static void main(String[] args) {
        try {
            HiveJDBCAPI.createDataBase();
            // HiveJDBCAPI.dropDataBase();
            // HiveJDBCAPI.createTable("hivetb");
            // HiveJDBCAPI.createTable("hivetb2");
            // HiveJDBCAPI.dropTable("hivetb");
            // HiveJDBCAPI.showTables("hivetb");
            // HiveJDBCAPI.describeTables("hivetb");
            // HiveJDBCAPI.loadData("hivetb",
            //         "/home/project/soft/hive/apache-hive-1.2.1-bin/bin/data.txt");
            // HiveJDBCAPI.selectData("hivetb");
            // HiveJDBCAPI.countData("hivetb");
            // HiveJDBCAPI.selectWhere("hivetb");
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            // Guard against a failed connection in the static block, and
            // close the Statement before the Connection.
            try {
                if (stmt != null) {
                    stmt.close();
                }
                if (conn != null) {
                    conn.close();
                }
            } catch (SQLException e) {
                e.printStackTrace();
            }
        }
    }
}
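For reference, loadData expects a plain text file whose two columns (name and des) are separated by a single tab character, matching the FIELDS TERMINATED BY '\t' clause in createTable. A minimal data.txt might look like this (the contents are illustrative; the mike2 row is what the selectWhere example filters on):

mike1	this is mike1
mike2	this is mike2
mike3	this is mike3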
Step 5: Run the main method, e.g. via Run As in Eclipse.
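Note that the code assumes HiveServer2 is already listening on port 10000 of the machine in the JDBC URL. If it is not running, it can typically be started on the Hive host with (assuming HIVE_HOME points at the Hive installation):

$HIVE_HOME/bin/hiveserver2

Before running the Java program, the connection can be sanity-checked with Beeline:

$HIVE_HOME/bin/beeline -u jdbc:hive2://127.0.0.1:10000/default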