1.在终端启动hiveserver2
#hiveserver2

2.使用beeline连接hive
另外打开一个终端,输入如下命令(xavierdb必须是已经存在的数据库)
#beeline -u jdbc:hive2://localhost:10000/xavierdb -n hive -p hive

3.添加maven依赖

<!-- https://mvnrepository.com/artifact/org.apache.hive/hive-jdbc -->
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-jdbc</artifactId>
<version>1.1.0</version>
</dependency> <dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.9</version>
</dependency> <dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>2.6.0</version>
</dependency> <!-- https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-client -->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>2.6.0</version>
</dependency> <!-- https://mvnrepository.com/artifact/org.apache.hive/hive-metastore -->
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-metastore</artifactId>
<version>1.1.0</version>
</dependency> <!-- https://mvnrepository.com/artifact/org.apache.hive/hive-metastore -->
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-exec</artifactId>
<version>1.1.0</version>
</dependency>

maven依赖

出现过的错误: Error: Could not open client transport with JDBC Uri: jdbc:hive2://localhost:10000/default

解决办法:检查发现运行beeline时Driver版本Driver: Hive JDBC (version 1.1.0-cdh5.16.1)比maven依赖中的Driver版本低,将maven版本调至1.1.0问题解决

Java API测试:

注意:这里的url必须是beeline值中使用的url


package TestOption;

import org.junit.Test;
import org.junit.After;
import org.junit.Before;

import java.sql.*;

/**
 * Exercises basic Hive operations through the HiveServer2 JDBC driver:
 * database DDL, managed ("internal") tables, external tables, data loading,
 * queries, and static/dynamic partitioning.
 *
 * <p>NOTE: {@code url} must be the same URL that works in beeline
 * (e.g. {@code jdbc:hive2://host:10000/database}); replace the placeholder
 * host/database before running.
 *
 * @Author:Xavier
 * @Data:2019-02-18 11:43
 **/
public class HiveOption {
    private static String driverName = "org.apache.hive.jdbc.HiveDriver";
    private static String url = "jdbc:hive2://yourhost:10000/yourdatabase";
    private static Connection con = null;
    private static Statement state = null;
    private static ResultSet res = null;

    /**
     * Loads the driver and opens the connection/statement before each test.
     *
     * @throws ClassNotFoundException if the Hive JDBC driver is not on the classpath
     * @throws SQLException           if the connection cannot be established
     */
    @Before
    public void init() throws ClassNotFoundException, SQLException {
        Class.forName(driverName);
        con = DriverManager.getConnection(url, "hive", "hive");
        state = con.createStatement();
    }

    // Create a database.
    @Test
    public void CreateDb() throws SQLException {
        state.execute("create database xavierdb1");
    }

    // List all databases. (Method name typo "showtDb" kept for compatibility.)
    @Test
    public void showtDb() throws SQLException {
        res = state.executeQuery("show databases");
        while (res.next()) {
            System.out.println(res.getString(1));
        }
    }

    // Drop the database created above.
    @Test
    public void dropDb() throws SQLException {
        state.execute("drop database if exists xavierdb1");
    }

    /*
     * Managed ("internal") table operations.
     */

    // Create a managed table.
    @Test
    public void createTab() throws SQLException {
        state.execute("create table if not exists student ( " +
                "name string , " +
                "age int , " +
                "agent string ," +
                "adress struct<street:STRING,city:STRING>) " +
                "row format delimited " +
                "fields terminated by ',' " +      // separator between fields
                "collection items terminated by ':'" + // separator between items of one field
                "lines terminated by '\n' ");      // row separator
    }

    // List all tables in the current database.
    @Test
    public void showTab() throws SQLException {
        res = state.executeQuery("show tables");
        while (res.next()) {
            System.out.println(res.getString(1));
        }
    }

    // Describe a table's columns (name and type).
    @Test
    public void descTab() throws SQLException {
        res = state.executeQuery("desc emp");
        while (res.next()) {
            System.out.println(res.getString(1) + "\t" + res.getString(2));
        }
    }

    // Load data from a local file into the managed table.
    @Test
    public void loadData() throws SQLException {
        String infile = " '/root/studentData' ";
        // FIX: explicit space before "overwrite" — the original relied on the
        // trailing space inside `infile` to keep the SQL valid.
        state.execute("load data local inpath " + infile + " overwrite into table student");
    }

    // Query all rows.
    @Test
    public void selectTab() throws SQLException {
        res = state.executeQuery("select * from student1");
        while (res.next()) {
            System.out.println(
                    res.getString(1) + "-" +
                    res.getString(2) + "-" +
                    res.getString(3) + "-" +
                    res.getString(4));
        }
    }

    // Aggregate query (launches a MapReduce job — relatively expensive).
    @Test
    public void countData() throws SQLException {
        res = state.executeQuery("select count(1) from student");
        while (res.next()) {
            System.out.println(res.getInt(1));
        }
    }

    // Drop a table.
    @Test
    public void dropTab() throws SQLException {
        state.execute("drop table emp");
    }

    /*
     * External table operations.
     *
     * After an external table is dropped, its data remains on HDFS;
     * re-creating an external table at the same location picks the data
     * back up.
     */

    // Create an external table.
    @Test
    public void createExTab() throws SQLException {
        state.execute("create external table if not exists student1 ( " +
                "name string , " +
                "age int , " +
                "agent string ," +
                "adress struct<street:STRING,city:STRING>) " +
                "row format delimited " +
                "fields terminated by ',' " +
                "collection items terminated by ':'" +
                "lines terminated by '\n' " +
                "stored as textfile " +
                // Without a location the path from hive.metastore.warehouse.dir is used.
                "location '/testData/hive/student1' ");
    }

    // Copy only the schema (not the data) of an existing table:
    //   create table student1 as select * from student   -- schema + data
    //   create table student1 like student               -- schema only
    @Test
    public void copyExTab() throws SQLException {
        state.execute("create external table if not exists student2 " +
                "like xavierdb.student " +
                "location '/testData/hive/student1'");
    }

    /*
     * Partitioned tables.
     *
     * Partitions must be declared when the table is defined.
     */

    // Static partitioning: create a partitioned table.
    @Test
    public void creatPartab() throws SQLException {
        state.execute("create table if not exists emp (" +
                "name string ," +
                "salary int ," +
                "subordinate array<string> ," +
                "deductions map<string,float> ," +
                "address struct<street:string,city:string>) " +
                "partitioned by (city string,street string) " +
                "row format delimited " +
                "fields terminated by '\t' " +
                "collection items terminated by ',' " +
                "map keys terminated by ':' " +
                "lines terminated by '\n' " +
                "stored as textfile");
    }

    // Add a partition.
    @Test
    public void addPartition() throws SQLException {
        state.execute("alter table emp add partition(city='shanghai',street='jinkelu') ");
    }

    // Show the partitions of a table.
    @Test
    public void showPartition() throws SQLException {
        // res=state.executeQuery("select * from emp");
        res = state.executeQuery("show partitions emp");
        while (res.next()) {
            System.out.println(res.getString(1));
        }
    }

    // Load data into a specific static partition.
    @Test
    public void loadParData() throws SQLException {
        String filepath = " '/root/emp' ";
        state.execute("load data local inpath " + filepath + " overwrite into table emp partition (city='shanghai',street='jinkelu')");
    }

    // Drop a partition.
    @Test
    public void dropPartition() throws SQLException {
        // BUG FIX: the original targeted table "employees", which is never
        // created anywhere in this class — every other method uses "emp".
        state.execute("alter table emp drop partition (city='shanghai',street='jinkelu') ");
        /*
         * Other partition maintenance commands:
         *
         * 1. Archive a partition into a har file:
         *    alter table emp archive partition (city='shanghai',street='jinkelu')
         * 2. Restore an archived partition:
         *    alter table emp unarchive partition (city='shanghai',street='jinkelu')
         * 3. Protect a partition from being dropped:
         *    alter table emp partition (city='shanghai',street='jinkelu') enable no_drop
         * 4. Protect a partition from being queried:
         *    alter table emp partition (city='shanghai',street='jinkelu') enable offline
         * 5. Re-allow dropping/querying:
         *    alter table emp partition (city='shanghai',street='jinkelu') disable no_drop
         *    alter table emp partition (city='shanghai',street='jinkelu') disable offline
         */
    }

    // External tables can be partitioned as well.
    //
    // Dynamic partitioning: when one INSERT must populate many partitions,
    // Hive picks the target partition from the data of each row instead of
    // from a directory named in the statement.
    //
    // set hive.exec.dynamic.partition=true;           -- enable dynamic partitioning
    // set hive.exec.dynamic.partition.mode=nonstrict; -- partition mode (default is strict)
    // set hive.exec.max.dynamic.partitions=1000;      -- max dynamic partitions (default 1000)

    // Create the target table for dynamic partitioning.
    @Test
    public void creatPartab1() throws SQLException {
        state.execute("create table if not exists emp1 (" +
                "name string ," +
                "salary int ," +
                "subordinate array<string> ," +
                "deductions map<string,float> ," +
                "address struct<street:string,city:string>) " +
                "partitioned by (city string,street string) " +
                "row format delimited " +
                "fields terminated by '\t' " +
                "collection items terminated by ',' " +
                "map keys terminated by ':' " +
                "lines terminated by '\n' " +
                "stored as textfile");
    }

    // Populate partitions from a query (street is assigned dynamically).
    @Test
    public void loadPartitionData() throws SQLException {
        state.execute("insert overwrite table emp1 partition (city='shanghai',street) " +
                "select name,salary,subordinate,deductions,address,address.street from emp");
    }

    /**
     * Releases resources after each test, in reverse order of acquisition.
     * (Method name typo "destory" kept for compatibility.)
     *
     * BUG FIX: the original called {@code state.close()} when {@code res} was
     * non-null, leaking the ResultSet and closing the Statement twice.
     */
    @After
    public void destory() throws SQLException {
        if (res != null) res.close();
        if (state != null) state.close();
        if (con != null) con.close();
    }
}
 

***连接HiveServer2修改hive配置的方法***

1)、直接在URL中添加

...
url = "jdbc:hive2://yourhost:10000/yourdatabase?mapreduce.job.queuename=root.hive-server2;hive.execution.engine=spark";
Connection con = DriverManager.getConnection(url, "hive", "hive");
...

多个conf配置之间使用 " ; " 分割开;conf配置部分与Hive变量(hivevar)部分使用 " # " 分割开

2)、使用state.execute 直接执行set 操作

...
state.execute("set hive.execution.engine=spark");
...

3 )、通过连接属性设置

Class.forName("org.apache.hive.jdbc.HiveDriver");
Properties propertie = new Properties();
propertie.setProperty("user", "hive");
propertie.setProperty("password", "hive");
// 这里传递了一个队列的hive_conf
propertie.setProperty("hive.execution.engine", "spark");
String url="jdbc:hive2://yourhost:10000/yourdatabase";
Connection conn = DriverManager.getConnection(url, propertie);
HiveStatement stat = (HiveStatement) conn.createStatement();

用Java代码通过JDBC连接Hiveserver2的更多相关文章

  1. Hive:用Java代码通过JDBC连接Hiveserver

    参考https://www.iteblog.com/archives/846.html 1.hive依赖hadoop,将hdfs当作文件存储介质,那是否意味着hive需要知道namenode的地址? ...

  2. java代码实现JDBC连接MySql以及引用驱动程序包

    JDBC链接MySql     JDBC链接MySql的话题已经老掉牙了,这次我只想通过使用简洁的代码实现,采用封装的思想,将链接MySql的代码封装在类的静态方法中,供一次性调用返回java.sql ...

  3. 通过JDBC连接HiveServer2

    如果通过JDBC连接HiveServer2时提示:User: hive is not allowed to impersonate hive,需要在core-site.xml中新增如下配置: hado ...

  4. Java基础93 JDBC连接MySQL数据库

    本文知识点(目录): 1.什么是jdbc     2.jdbc接口的核心API     3.使用JDBC技术连接MySQL数据库的方法    4.使用Statement执行sql语句(DDL.DML. ...

  5. 【JDBC】java程序通过jdbc连接oracle数据库方法

    版权声明:本文为博主原创文章(原文:blog.csdn.net/clark_xu 徐长亮的专栏).未经博主同意不得转载. https://blog.csdn.net/u011538954/articl ...

  6. 大数据系列-java用官方JDBC连接greenplum数据库

    这个其实非常简单,之所以要写此文是因为当前网上搜索到的文章都是使用PostgreSQL的驱动,没有找到使用greenplum官方驱动的案例,两者有什么区别呢? 一开始我也使用的是PostgreSQL的 ...

  7. java 命令行JDBC连接Mysql

    环境:Windows10 + java8 + mysql 8.0.15 + mysql-connector-java-8.0.15.jar mysql驱动程序目录 项目目录 代码: //package ...

  8. (转)CDH中启动的hive,使用jdbc连接hiveServer2时候权限不足解决方案

    Hive JDBC:java.lang.RuntimeException: org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.securi ...

  9. Java是用JDBC连接MySQL数据库

    首先要下载Connector/J地址:http://www.mysql.com/downloads/connector/j/ 这是MySQL官方提供的连接方式: 解压后得到jar库文件,需要在工程中导 ...

随机推荐

  1. selenium常用的模块

    from selenium import webdriver #select模块处理下拉框from selenium.webdriver.support.ui import Select # Keys ...

  2. Android 开发 8.0版本启动Service的方法

    前言  google在更新Android8.0后对Service的权限越发收紧.导致目前想要启动服务必需实现服务的前台化(否则在服务启动5秒后,系统将自动报错).下面我们就来看看如何在8.0上启动服务 ...

  3. Linux on window初体验

    参照来源: https://www.cnblogs.com/enet01/p/7458767.html 1:liunx on window 的配置不多说(百度网上很多)启动开发这模式,在应用和程序中勾 ...

  4. dubbo rest服务 No provider available for the service 错误问题

    1.版本 dubbo 2.6.2 2.描述 消费者调用dubbo rest服务报No provider available for the service错误 网络上有讲是实体类未实现Serializ ...

  5. mybatis-plus报org.apache.ibatis.binding.BindingException分析【转载】

    这个问题整整纠结了我四个多小时,心好累啊...不废话... 背景:Spring整合Mybatis 报错:org.apache.ibatis.binding.BindingException: Inva ...

  6. TIMESTAMP类型字段在SQL Server和MySQL中的含义和使用

    公众号上转的满天飞的一篇文章,MySQL优化相关的,无意中瞄到一句“尽量使用TIMESTAMP而非DATETIME”,之前对TIMESTAMP也不太熟悉,很少使用,于是查了一下两者的区别. 其实,不管 ...

  7. Install Oracle Database client in silent mode

    下面通过在工作中的使用,总结出不同版本Oracle client的静默(silent)安装方法. Oracle Database client 12.2.0.1 1. reponse file con ...

  8. week05 codelab01 Babel ES6 webpack Nodejsserver等

    Babel 他出现的原因就是很多浏览器还未完全兼容ES6 需要将你写的ES6的内容转换成ES5让浏览器兼容运行 ES5和ES6相比出现很多新内容 比如拼接字符串 ES6可以` ` 里面如果引用变量就用 ...

  9. win10 开发mfc 64位 ocx控件

    问题1.模块“XXX.ocx”加载失败 解决办法:项目--〉属性--〉常规-〉配置类型-〉  动态库(.dll) 修改为 静态库(.lib) 问题2.1>x64\Release\stdafx.o ...

  10. pytorch1.0 安装执行后报错ImportError: No module named future.utils

    File "/usr/local/lib/python2.7/dist-packages/caffe2/python/utils.py", line 10, in <modu ...