HDFS文件操作工具类
2018-01-26 17:25
387 查看
package hdfs;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.IOUtils;
import java.io.IOException;
import java.net.URI;
/**
* Created by xiaohei on 16/3/9.
* HDFS操作类
*/
/**
 * Utility class for common HDFS file operations: mkdir, delete, rename,
 * list, create, upload, download, and cat.
 *
 * <p>Each method opens its own {@link FileSystem} handle and closes it before
 * returning (via try-with-resources, so the handle is released even when an
 * exception is thrown — the original code leaked it on the error path).
 * NOTE(review): {@code FileSystem.get} returns a cached, process-wide instance
 * by default, so closing it here also closes it for any concurrent user; fine
 * for a standalone tool, but verify before reusing in a larger application.
 */
public class HdfsUtil {
    /** NameNode URI; all relative paths are resolved against this filesystem. */
    private static final String HDFS = "hdfs://master:9000/";
    private static final Configuration conf = new Configuration();

    /** Opens a FileSystem handle for the configured HDFS cluster. */
    private static FileSystem openFs() throws IOException {
        return FileSystem.get(URI.create(HDFS), conf);
    }

    /**
     * Creates a directory (including missing parents) if it does not exist.
     *
     * @param folder directory path to create
     * @throws IOException if the filesystem cannot be reached
     */
    public static void mkdirs(String folder) throws IOException {
        Path path = new Path(folder);
        try (FileSystem fs = openFs()) {
            if (!fs.exists(path)) {
                fs.mkdirs(path);
                System.out.println("Create: " + folder);
            } else {
                // Fixed message: original concatenation printed "xxisexists!".
                System.out.println(folder + " already exists!");
            }
        }
    }

    /**
     * Recursively deletes a file or directory.
     *
     * <p>Bug fix: the original called {@code deleteOnExit}, which only marks
     * the path for deletion when the FileSystem is closed and ignores the
     * outcome; {@code delete(path, true)} deletes immediately and reports
     * whether it succeeded.
     *
     * @param folder path to delete
     * @throws IOException if the filesystem cannot be reached
     */
    public static void rmr(String folder) throws IOException {
        Path path = new Path(folder);
        try (FileSystem fs = openFs()) {
            if (fs.delete(path, true)) {
                System.out.println("Delete: " + folder + " --- successful!");
            } else {
                System.out.println("Delete failed: " + folder);
            }
        }
    }

    /**
     * Renames (moves) a file or directory.
     *
     * @param src source path
     * @param dst destination path
     * @throws IOException if the filesystem cannot be reached
     */
    public static void rename(String src, String dst) throws IOException {
        Path from = new Path(src);
        Path to = new Path(dst);
        try (FileSystem fs = openFs()) {
            // rename returns false on failure (e.g. missing source); the
            // original silently ignored the result.
            if (fs.rename(from, to)) {
                System.out.println("Rename: from " + src + " to " + dst);
            } else {
                System.out.println("Rename failed: from " + src + " to " + dst);
            }
        }
    }

    /**
     * Prints name, directory flag, and size for every entry under a path.
     *
     * @param folder directory to list
     * @throws IOException if the filesystem cannot be reached
     */
    public static void ls(String folder) throws IOException {
        Path path = new Path(folder);
        try (FileSystem fs = openFs()) {
            FileStatus[] list = fs.listStatus(path);
            System.out.println("ls: " + folder);
            System.out.println("==========================================================");
            for (FileStatus f : list) {
                System.out.printf("name: %s, folder: %s, size: %d\n", f.getPath(), f.isDirectory(), f.getLen());
            }
            System.out.println("==========================================================");
        }
    }

    /**
     * Creates a file on HDFS with the given text content, overwriting any
     * existing file at that path.
     *
     * <p>Bug fix: encodes the content as UTF-8 explicitly; the original used
     * {@code String.getBytes()}, whose result depends on the JVM's platform
     * default charset.
     *
     * @param file    file path to create
     * @param content text to write
     * @throws IOException if the filesystem cannot be reached
     */
    public static void createFile(String file, String content) throws IOException {
        byte[] buff = content.getBytes(StandardCharsets.UTF_8);
        try (FileSystem fs = openFs();
             FSDataOutputStream os = fs.create(new Path(file))) {
            os.write(buff, 0, buff.length);
            System.out.println("Create: " + file);
        }
    }

    /**
     * Uploads a local file to HDFS.
     *
     * @param local  local source path
     * @param remote HDFS destination path
     * @throws IOException if the filesystem cannot be reached
     */
    public static void copyFile(String local, String remote) throws IOException {
        try (FileSystem fs = openFs()) {
            fs.copyFromLocalFile(new Path(local), new Path(remote));
            System.out.println("copy from: " + local + " to " + remote);
        }
    }

    /**
     * Downloads an HDFS file to the local filesystem.
     *
     * @param remote HDFS source path
     * @param local  local destination path
     * @throws IOException if the filesystem cannot be reached
     */
    public static void download(String remote, String local) throws IOException {
        Path path = new Path(remote);
        try (FileSystem fs = openFs()) {
            fs.copyToLocalFile(path, new Path(local));
            // Fixed message: original was missing the space after "from".
            System.out.println("download: from " + remote + " to " + local);
        }
    }

    /**
     * Streams the content of an HDFS file to standard output.
     *
     * @param remoteFile HDFS file path
     * @throws IOException if the filesystem cannot be reached
     */
    public static void cat(String remoteFile) throws IOException {
        Path path = new Path(remoteFile);
        System.out.println("cat: " + remoteFile);
        try (FileSystem fs = openFs();
             FSDataInputStream in = fs.open(path)) {
            // 'false' = do not close the output stream (System.out).
            IOUtils.copyBytes(in, System.out, 4096, false);
        }
    }
}
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.IOUtils;
import java.io.IOException;
import java.net.URI;
/**
* Created by xiaohei on 16/3/9.
* HDFS操作类
*/
/**
 * Utility class for common HDFS file operations: mkdir, delete, rename,
 * list, create, upload, download, and cat.
 *
 * <p>Each method opens its own {@link FileSystem} handle and closes it before
 * returning (via try-with-resources, so the handle is released even when an
 * exception is thrown — the original code leaked it on the error path).
 * NOTE(review): {@code FileSystem.get} returns a cached, process-wide instance
 * by default, so closing it here also closes it for any concurrent user; fine
 * for a standalone tool, but verify before reusing in a larger application.
 */
public class HdfsUtil {
    /** NameNode URI; all relative paths are resolved against this filesystem. */
    private static final String HDFS = "hdfs://master:9000/";
    private static final Configuration conf = new Configuration();

    /** Opens a FileSystem handle for the configured HDFS cluster. */
    private static FileSystem openFs() throws IOException {
        return FileSystem.get(URI.create(HDFS), conf);
    }

    /**
     * Creates a directory (including missing parents) if it does not exist.
     *
     * @param folder directory path to create
     * @throws IOException if the filesystem cannot be reached
     */
    public static void mkdirs(String folder) throws IOException {
        Path path = new Path(folder);
        try (FileSystem fs = openFs()) {
            if (!fs.exists(path)) {
                fs.mkdirs(path);
                System.out.println("Create: " + folder);
            } else {
                // Fixed message: original concatenation printed "xxisexists!".
                System.out.println(folder + " already exists!");
            }
        }
    }

    /**
     * Recursively deletes a file or directory.
     *
     * <p>Bug fix: the original called {@code deleteOnExit}, which only marks
     * the path for deletion when the FileSystem is closed and ignores the
     * outcome; {@code delete(path, true)} deletes immediately and reports
     * whether it succeeded.
     *
     * @param folder path to delete
     * @throws IOException if the filesystem cannot be reached
     */
    public static void rmr(String folder) throws IOException {
        Path path = new Path(folder);
        try (FileSystem fs = openFs()) {
            if (fs.delete(path, true)) {
                System.out.println("Delete: " + folder + " --- successful!");
            } else {
                System.out.println("Delete failed: " + folder);
            }
        }
    }

    /**
     * Renames (moves) a file or directory.
     *
     * @param src source path
     * @param dst destination path
     * @throws IOException if the filesystem cannot be reached
     */
    public static void rename(String src, String dst) throws IOException {
        Path from = new Path(src);
        Path to = new Path(dst);
        try (FileSystem fs = openFs()) {
            // rename returns false on failure (e.g. missing source); the
            // original silently ignored the result.
            if (fs.rename(from, to)) {
                System.out.println("Rename: from " + src + " to " + dst);
            } else {
                System.out.println("Rename failed: from " + src + " to " + dst);
            }
        }
    }

    /**
     * Prints name, directory flag, and size for every entry under a path.
     *
     * @param folder directory to list
     * @throws IOException if the filesystem cannot be reached
     */
    public static void ls(String folder) throws IOException {
        Path path = new Path(folder);
        try (FileSystem fs = openFs()) {
            FileStatus[] list = fs.listStatus(path);
            System.out.println("ls: " + folder);
            System.out.println("==========================================================");
            for (FileStatus f : list) {
                System.out.printf("name: %s, folder: %s, size: %d\n", f.getPath(), f.isDirectory(), f.getLen());
            }
            System.out.println("==========================================================");
        }
    }

    /**
     * Creates a file on HDFS with the given text content, overwriting any
     * existing file at that path.
     *
     * <p>Bug fix: encodes the content as UTF-8 explicitly; the original used
     * {@code String.getBytes()}, whose result depends on the JVM's platform
     * default charset.
     *
     * @param file    file path to create
     * @param content text to write
     * @throws IOException if the filesystem cannot be reached
     */
    public static void createFile(String file, String content) throws IOException {
        byte[] buff = content.getBytes(StandardCharsets.UTF_8);
        try (FileSystem fs = openFs();
             FSDataOutputStream os = fs.create(new Path(file))) {
            os.write(buff, 0, buff.length);
            System.out.println("Create: " + file);
        }
    }

    /**
     * Uploads a local file to HDFS.
     *
     * @param local  local source path
     * @param remote HDFS destination path
     * @throws IOException if the filesystem cannot be reached
     */
    public static void copyFile(String local, String remote) throws IOException {
        try (FileSystem fs = openFs()) {
            fs.copyFromLocalFile(new Path(local), new Path(remote));
            System.out.println("copy from: " + local + " to " + remote);
        }
    }

    /**
     * Downloads an HDFS file to the local filesystem.
     *
     * @param remote HDFS source path
     * @param local  local destination path
     * @throws IOException if the filesystem cannot be reached
     */
    public static void download(String remote, String local) throws IOException {
        Path path = new Path(remote);
        try (FileSystem fs = openFs()) {
            fs.copyToLocalFile(path, new Path(local));
            // Fixed message: original was missing the space after "from".
            System.out.println("download: from " + remote + " to " + local);
        }
    }

    /**
     * Streams the content of an HDFS file to standard output.
     *
     * @param remoteFile HDFS file path
     * @throws IOException if the filesystem cannot be reached
     */
    public static void cat(String remoteFile) throws IOException {
        Path path = new Path(remoteFile);
        System.out.println("cat: " + remoteFile);
        try (FileSystem fs = openFs();
             FSDataInputStream in = fs.open(path)) {
            // 'false' = do not close the output stream (System.out).
            IOUtils.copyBytes(in, System.out, 4096, false);
        }
    }
}
相关文章推荐
- Java实现HDFS文件操作工具类
- 对Properties文件的增删改查操作工具类
- [Java工具类]Spring常用工具类 1.文件资源操作和Web相关工具类
- Hadoop HDFS常用文件操作命令
- Hadoop HDFS文件操作
- hadoop-3.0.0-beta1运维手册(007):hdfs3.0.0基本操作-上传、下载、删除文件或目录
- HDFS的文件操作流(4)——写操作(数据节点)
- Hadoop之HDFS文件操作
- Spring 的优秀工具类盘点,第 1 部分: 文件资源操作和 Web 相关工具类
- 第一章 属性文件操作工具类
- HADOOP之HDFS文件操作
- 利用 Hadoop FileSystem listStatus 遍历文件目录 实现HDFS操作
- 使用Hadoop API操作HDFS文件时遇到的问题及解决总结
- HDFS文件和HIVE表的一些操作
- Hadoop之HDFS文件操作常有两种方式(转载)
- hdfs基本的文件系统操作
- java对HDFS中文件的操作
- java中关于文件操作常用工具类
- 排坑!java操作hadoop中的hdfs文件系统
- Java工具类——文件操作