The examples in this post use the following three tables:

CREATE TABLE page_view(viewTime INT, userid BIGINT, p_date TIMESTAMP,
  page_url STRING, referrer_url VARCHAR(200),
  ip STRING COMMENT 'IP Address of the User')
COMMENT 'This is the page view table'
PARTITIONED BY(dt STRING, country STRING)
STORED AS SEQUENCEFILE
TBLPROPERTIES ('creator'='wx', 'date'='2015-07-18');

CREATE TABLE students(name VARCHAR(64), age INT, gpa DECIMAL(3, 2))
CLUSTERED BY(age) INTO 2 BUCKETS
STORED AS RCFILE;

CREATE TABLE user_info_bucketed(user_id BIGINT, firstname STRING, lastname STRING)
COMMENT 'A bucketed copy of user_info'
PARTITIONED BY(ds STRING)
CLUSTERED BY(user_id) SORTED BY(firstname) INTO 3 BUCKETS;
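The SHOW PARTITIONS examples further down assume that a few partitions already exist on user_info_bucketed. They are not created by the DDL above; as a minimal sketch (the ds values simply mirror the output shown later), they could be added like this:

-- Sketch: pre-create the partitions queried in the SHOW PARTITIONS examples
ALTER TABLE user_info_bucketed ADD PARTITION (ds='2015-07-20');
ALTER TABLE user_info_bucketed ADD PARTITION (ds='2015-07-25');
ALTER TABLE user_info_bucketed ADD PARTITION (ds='2015-07-30');
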
Show Databases
hive> show databases;
default
test
hive> SHOW DATABASES LIKE 'te*';
test
hive> SHOW DATABASES LIKE 'te*|de*';
default
test

Show Tables

hive> SHOW TABLES;
students
user_info_bucketed
hive> SHOW TABLES IN test;
students
user_info_bucketed
hive> use test;
hive> SHOW TABLES 'user*';
user_info_bucketed
hive> SHOW TABLES 'user*|stu*';
students
user_info_bucketed

Show Partitions

hive> SHOW PARTITIONS user_info_bucketed;
ds=2015-07-20
ds=2015-07-25
ds=2015-07-30
hive> SHOW PARTITIONS user_info_bucketed PARTITION(ds='2015-07-25');
ds=2015-07-25
hive> SHOW PARTITIONS test.user_info_bucketed PARTITION(ds='2015-07-25');
ds=2015-07-25
hive> SHOW TABLE EXTENDED in test LIKE 'user_info_bucketed' PARTITION(ds='2015-07-20');
tableName:user_info_bucketed
owner:wx
location:hdfs://ns1/user/hive/warehouse/test.db/user_info_bucketed/ds=2015-07-20
inputformat:org.apache.hadoop.mapred.TextInputFormat
outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
columns:struct columns { i64 user_id, string firstname, string lastname}
partitioned:true
partitionColumns:struct partition_columns { string ds}
totalNumberFiles:3
totalFileSize:72
maxFileSize:36
minFileSize:14
lastAccessTime:1437793073583
lastUpdateTime:1437823862864

Show Table Properties

hive> SHOW TBLPROPERTIES page_view;
comment This is the page view table
creator wx
date 2015-07-18
transient_lastDdlTime 1437825559
hive> SHOW TBLPROPERTIES page_view("creator");
wx

Show Create Table

hive> SHOW CREATE TABLE page_view;
CREATE TABLE `page_view`(
  `viewtime` int,
  `userid` bigint,
  `p_date` timestamp,
  `page_url` string,
  `referrer_url` varchar(200),
  `ip` string COMMENT 'IP Address of the User')
COMMENT 'This is the page view table'
PARTITIONED BY (
  `dt` string,
  `country` string)
ROW FORMAT SERDE
  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
STORED AS INPUTFORMAT
  'org.apache.hadoop.mapred.SequenceFileInputFormat'
OUTPUTFORMAT
  'org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat'
LOCATION
  'hdfs://ns1/user/hive/warehouse/test.db/page_view'
TBLPROPERTIES (
  'creator'='wx',
  'date'='2015-07-18',
  'transient_lastDdlTime'='1437825559')

hive> SHOW CREATE TABLE user_info_bucketed;
CREATE TABLE `user_info_bucketed`(
  `user_id` bigint,
  `firstname` string,
  `lastname` string)
COMMENT 'A bucketed copy of user_info'
PARTITIONED BY (
  `ds` string)
CLUSTERED BY (
  user_id)
SORTED BY (
  firstname ASC)
INTO 3 BUCKETS
ROW FORMAT SERDE
  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
STORED AS INPUTFORMAT
  'org.apache.hadoop.mapred.TextInputFormat'
OUTPUTFORMAT
  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
LOCATION
  'hdfs://ns1/user/hive/warehouse/test.db/user_info_bucketed'
TBLPROPERTIES (
  'transient_lastDdlTime'='1437793010')

hive> SHOW CREATE TABLE test.students;
CREATE TABLE `test.students`(
  `name` varchar(64),
  `age` int,
  `gpa` decimal(3,2))
CLUSTERED BY (
  age)
INTO 2 BUCKETS
ROW FORMAT SERDE
  'org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe'
STORED AS INPUTFORMAT
  'org.apache.hadoop.hive.ql.io.RCFileInputFormat'
OUTPUTFORMAT
  'org.apache.hadoop.hive.ql.io.RCFileOutputFormat'
LOCATION
  'hdfs://ns1/user/hive/warehouse/test.db/students'
TBLPROPERTIES (
  'transient_lastDdlTime'='1437793374')

Show Indexes

hive> SHOW INDEXES ON user_info_bucketed;
inx_1                   user_info_bucketed      user_id                 test__user_info_bucketed_inx_1__        compact                 index-my!!!!
hive> SHOW FORMATTED INDEXES ON user_info_bucketed;
idx_name                tab_name                col_names               idx_tab_name                    idx_type                comment
inx_1                   user_info_bucketed      user_id                 test__user_info_bucketed_inx_1__        compact                 index-my!!!!
hive> SHOW FORMATTED INDEXES ON user_info_bucketed IN test;
inx_1                   user_info_bucketed      user_id                 test__user_info_bucketed_inx_1__        compact                 index-my!!!!
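The index listed above has to be created beforehand; SHOW INDEXES only reports it. A minimal sketch of such a statement, reconstructed from the columns in the output (index name, indexed column, handler type, and comment), might look like the following. Note that the index feature was removed entirely in Hive 3.0.

CREATE INDEX inx_1 ON TABLE user_info_bucketed (user_id)
AS 'COMPACT' WITH DEFERRED REBUILD
COMMENT 'index-my!!!!';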

Show Columns

hive> SHOW COLUMNS IN page_view;
viewtime
userid
p_date
page_url
referrer_url
ip
dt
country
hive> SHOW COLUMNS IN page_view IN test;
viewtime
userid
p_date
page_url
referrer_url
ip
dt
country

Show Functions

hive> SHOW FUNCTIONS;
(lists every registered function; output omitted)
hive> SHOW FUNCTIONS "a.*";
SHOW FUNCTIONS is deprecated, please use SHOW FUNCTIONS LIKE instead.
abs
acos
add_months
and
array
array_contains
ascii
asin
assert_true
atan
avg
hive> SHOW FUNCTIONS LIKE "a*";
abs
acos
add_months
and
array
array_contains
ascii
asin
assert_true
atan
avg
hive> SHOW FUNCTIONS LIKE "*a*";
(output omitted)
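Once SHOW FUNCTIONS has narrowed the list down, the documentation for an individual function can be printed with DESCRIBE FUNCTION (standard Hive; the output is a one-line description, or a longer help text with examples when EXTENDED is used, and is omitted here):

hive> DESCRIBE FUNCTION abs;
hive> DESCRIBE FUNCTION EXTENDED abs;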

Show Conf

Note that SHOW CONF displays a property's default value, type, and description; it does not show the value currently in effect for the session.

hive> SHOW CONF 'hive.exec.parallel';
false BOOLEAN Whether to execute jobs in parallel
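To check the value actually in effect for the current session, use SET with the property name; with an assignment it changes the value instead (the value printed below assumes the default of false has not been overridden):

hive> SET hive.exec.parallel;
hive.exec.parallel=false
hive> SET hive.exec.parallel=true;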
