package com.hy.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;

public class HDFSCommand {

    public static final Logger log = LoggerFactory.getLogger(HDFSCommand.class);

    public static void main(String[] args) throws Exception {
        String hdfsURI = "hdfs://10.1.23.240:9000";
        String srcPath = "D:" + File.separator + "readme.txt";
        String descPath = "/xhy";
        String data = "haohaohaohaohao\r\n善字\r\n善生\r\n善行\r\n守善\r\n愿善";
        Configuration conf = new Configuration();

        copyFromLocalFile(hdfsURI, srcPath, descPath, conf);
        uploadFile(hdfsURI, data, descPath, conf);

        RemoteIterator<LocatedFileStatus> locatedFileStatusRemoteIterator = listFile(hdfsURI, descPath, conf, true);
        while (locatedFileStatusRemoteIterator.hasNext()) {
            LocatedFileStatus next = locatedFileStatusRemoteIterator.next();
            System.out.println("listFile:" + next.toString());
        }

        FileStatus[] fileStatuses = listFileAndFolder(hdfsURI, descPath, conf);
        for (FileStatus f : fileStatuses) {
            System.out.println("listFileAndFolder:" + f.toString());
        }
    }

    /**
     * Upload a file from a local path to HDFS.
     *
     * @param hdfsURI  the HDFS URI, e.g. hdfs://host:9000
     * @param srcPath  the local source path
     * @param descPath the destination path on HDFS
     * @param conf     the Hadoop configuration
     */
    public static void copyFromLocalFile(String hdfsURI, String srcPath, String descPath, Configuration conf) throws URISyntaxException, IOException {
        log.info(">> copyFromLocalFile, srcPath is {}, descPath is {}", srcPath, descPath);
        FileSystem fs = FileSystem.get(new URI(hdfsURI), conf);
        fs.copyFromLocalFile(new Path(srcPath), new Path(descPath));
        log.info("<< copyFromLocalFile success");
        fs.close();
        /*
         * Under the hood this boils down to:
         * fs.open(new Path(srcPath), 4096);
         * fs.create(new Path(descPath));
         * IOUtils.copyBytes(in, out, conf, true);
         */
    }

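    /*
     * A minimal hand-rolled version of the copy sketched in the comment above;
     * an illustrative assumption, not the exact Hadoop internals (the method
     * name copyFromLocalFileManually is hypothetical).
     */
    public static void copyFromLocalFileManually(String hdfsURI, String srcPath, String descPath, Configuration conf) throws Exception {
        FileSystem fs = FileSystem.get(new URI(hdfsURI), conf);
        try (java.io.InputStream in = new java.io.FileInputStream(srcPath);
             FSDataOutputStream out = fs.create(new Path(descPath))) {
            // close=false: try-with-resources already closes both streams
            IOUtils.copyBytes(in, out, conf.getInt("io.file.buffer.size", 4096), false);
        } finally {
            fs.close();
        }
    }
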
    /**
     * Write string data directly to a file on HDFS.
     *
     * @param hdfsURI  the HDFS URI
     * @param data     the content to write
     * @param descPath the destination path on HDFS
     * @param conf     the Hadoop configuration
     */
    public static void uploadFile(String hdfsURI, String data, String descPath, Configuration conf) throws Exception {
        log.info(">> uploadFile, descPath is {}, data is {}", descPath, data);
        FileSystem fs = FileSystem.get(new URI(hdfsURI), conf);
        /*FSDataOutputStream fsOutputStream = fs.create(new Path(descPath), new Progressable() {
            @Override
            public void progress() {
                log.info("<< written to HDFS, file path: {}", descPath);
            }
        });*/
        FSDataOutputStream fsOutputStream = fs.create(new Path(descPath),
                () -> log.info("<< written to HDFS, file path: {}", descPath));
        // an explicit charset avoids depending on the platform default encoding
        byte[] bytes = data.getBytes(StandardCharsets.UTF_8);
        fsOutputStream.write(bytes, 0, bytes.length);
        /*
         * The following variants garble the Chinese characters:
         * fsOutputStream.writeBytes(data);  // keeps only the low byte of each char
         * fsOutputStream.writeUTF(data);    // modified UTF-8 with a 2-byte length prefix
         * fsOutputStream.writeChars(data);  // raw UTF-16 code units
         */
        fsOutputStream.close();
        fs.close();
    }

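    /*
     * A self-contained illustration of the encoding note above (a hypothetical
     * helper, not part of the original class): writeBytes keeps only the low
     * byte of each char, destroying multi-byte characters, while an explicit
     * UTF-8 getBytes round-trips them.
     */
    public static void encodingDemo() {
        String s = "善"; // U+5584
        byte[] utf8 = s.getBytes(java.nio.charset.StandardCharsets.UTF_8); // E5 96 84
        byte lowByte = (byte) s.charAt(0); // what writeBytes would emit: 0x84 only
        System.out.println(utf8.length + " byte(s) via UTF-8, low byte only: " + Integer.toHexString(lowByte & 0xFF));
    }
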
    /**
     * List the files under a given HDFS path.
     *
     * @param hdfsURI   the HDFS URI
     * @param path      the path to search
     * @param conf      the Hadoop configuration
     * @param recursive whether to search recursively
     * @return an iterator over the matching file statuses
     * @throws Exception if the filesystem cannot be reached
     */
    public static RemoteIterator<LocatedFileStatus> listFile(String hdfsURI, String path, Configuration conf, boolean recursive) throws Exception {
        log.info(">> listFile, path is {}, recursive is {}", path, recursive);
        FileSystem fs = FileSystem.get(new URI(hdfsURI), conf);
        RemoteIterator<LocatedFileStatus> result = fs.listFiles(new Path(path), recursive);
        log.info("<< listFile, result is {}", result);
        // fs is deliberately left open: closing it here would invalidate the returned iterator
        return result;
    }

    /**
     * List both the files and the folders directly under a given HDFS path.
     *
     * @param hdfsURI the HDFS URI
     * @param path    the path to search
     * @param conf    the Hadoop configuration
     * @return the statuses of the entries under the path
     */
    public static FileStatus[] listFileAndFolder(String hdfsURI, String path, Configuration conf) throws Exception {
        log.info(">> listFileAndFolder, path is {}", path);
        FileSystem fs = FileSystem.get(new URI(hdfsURI), conf);
        FileStatus[] result = fs.listStatus(new Path(path));
        // Arrays.toString, because logging the array itself only prints its identity hash
        log.info("<< listFileAndFolder, result is {}", Arrays.toString(result));
        return result;
    }

    /**
     * Create a folder on HDFS.
     *
     * @param hdfsURI the HDFS URI
     * @param path    the folder to create
     * @param conf    the Hadoop configuration
     * @throws Exception if the filesystem cannot be reached
     */
    public static void mkDir(String hdfsURI, String path, Configuration conf) throws Exception {
        log.info(">> mkDir, path is {}", path);
        FileSystem fs = FileSystem.get(new URI(hdfsURI), conf);
        boolean result = fs.mkdirs(new Path(path));
        if (result) {
            log.info("<< mkDir {} success", path);
        } else {
            log.error("<< mkDir {} error", path);
        }
    }

    /**
     * Delete the given path (recursively) from HDFS.
     *
     * @param hdfsURI the HDFS URI
     * @param path    the path to delete
     * @param conf    the Hadoop configuration
     * @throws IOException if the filesystem cannot be reached
     */
    public static void delete(String hdfsURI, String path, Configuration conf) throws IOException {
        log.info(">> delete, path is {}", path);
        // alternative style: set fs.defaultFS on the configuration instead of passing a URI
        conf.set("fs.defaultFS", hdfsURI);
        FileSystem fs = FileSystem.get(conf);
        if (!fs.exists(new Path(path))) {
            log.info("<< delete {} error, path does not exist", path);
            return;
        }
        boolean result = fs.delete(new Path(path), true);
        if (result) {
            log.info("<< delete {} success", path);
        } else {
            log.error("<< delete {} error", path);
        }
    }

    /**
     * Download a file from HDFS to the local filesystem.
     *
     * @param hdfsURI  the HDFS URI
     * @param srcPath  the source path on HDFS
     * @param descPath the local destination path
     * @param conf     the Hadoop configuration
     * @throws Exception if the copy fails
     */
    public static void downloadFile(String hdfsURI, String srcPath, String descPath, Configuration conf) throws Exception {
        log.info(">> downloadFile, srcPath is {}, descPath is {}", srcPath, descPath);
        FileSystem fs = FileSystem.get(new URI(hdfsURI), conf);
        FSDataInputStream in = fs.open(new Path(srcPath));
        OutputStream out = new FileOutputStream(new File(descPath));
        // this copyBytes overload closes both streams once the copy finishes
        IOUtils.copyBytes(in, out, conf);
    }

    /**
     * Print the content of an HDFS file to stdout.
     *
     * @param hdfsURI the HDFS URI
     * @param path    the file to print
     * @param conf    the Hadoop configuration
     * @throws Exception if the filesystem cannot be reached
     */
    public static void catFile(String hdfsURI, String path, Configuration conf) throws Exception {
log.info(">> catFile, path is {}", path);
FileSystem fs = FileSystem.get(URI.create(hdfsURI), conf);
FSDataInputStream in = fs.open(new Path(path));
try {
IOUtils.copyBytes(in, System.out, 4096, false);
} finally {
IOUtils.closeStream(in);
fs.close();
}
} }
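
Note that mkDir, delete, downloadFile and catFile are not exercised by main above. A minimal driver like the following shows how they chain together (a sketch; the /xhy/demo and readme paths are placeholders):

    Configuration conf = new Configuration();
    String hdfsURI = "hdfs://10.1.23.240:9000";
    HDFSCommand.mkDir(hdfsURI, "/xhy/demo", conf);
    HDFSCommand.catFile(hdfsURI, "/xhy/readme.txt", conf); // prints the file to stdout
    HDFSCommand.downloadFile(hdfsURI, "/xhy/readme.txt", "D:" + File.separator + "readme-copy.txt", conf);
    HDFSCommand.delete(hdfsURI, "/xhy/demo", conf);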
