Hadoop之旅(8)— HDFS API 实战操作
2017-08-30 16:31
483 查看
1、获取文件系统
/** * Get FileSystem (得到系统文件) * * @return * @throws Exception */ public static FileSystem getFileSystem() throws Exception { Configuration configuration = new Configuration(); configuration.set("fd.defaultFS","hdfs://chenzy-1:9000"); // get filesystem FileSystem fileSystem = FileSystem.get(configuration); System.out.println(fileSystem); return fileSystem; }
2、创建文件
//指定相对路径relative path 路径:/user/Administrator/文件名 private static void relativePath() throws Exception{ // get filesystem FileSystem fileSystem = getFileSystem(); boolean b = fileSystem.createNewFile(new Path("relative.txt")); System.out.println(b); fileSystem.close(); }
3、追加文件内容
//追加内容 private static void appendContent() throws Exception{ // get filesystem FileSystem fileSystem = getFileSystem(); Path path = new Path("/chenzy/mapreduce/wordcount/input/test.txt"); FSDataOutputStream append = fileSystem.append(path); append.write("this is append content ! 追加内容 !".getBytes()); append.close(); fileSystem.close(); } //添加内容 private static void bufferedContent() throws Exception{ // get filesystem FileSystem fileSystem = getFileSystem(); Path path = new Path("/chenzy/mapreduce/wordcount/input/test.txt"); FSDataOutputStream append = fileSystem.append(path); BufferedWriter bufferedWriter = new BufferedWriter(new OutputStreamWriter(append)); bufferedWriter.newLine(); bufferedWriter.write("this is append content ! 内容 !"); bufferedWriter.newLine(); bufferedWriter.write("这是换行的内容!"); bufferedWriter.newLine(); bufferedWriter.close(); fileSystem.close(); }
4、读取文件
//读取文件 private static void openRead() throws Exception{ // get filesystem FileSystem fileSystem = getFileSystem(); InputStream is = fileSystem.open(new Path("/chenzy/mapreduce/wordcount/input/test.txt")); BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(is)); String line = null; while ((line = bufferedReader.readLine()) != null){ System.out.println(line); } is.close(); bufferedReader.close(); fileSystem.close(); }
5、从本地拷贝到hdfs
//从本地拷贝到hdfs private static void copyFromlocal() throws Exception{ // get filesystem FileSystem fileSystem = getFileSystem(); fileSystem.copyFromLocalFile(new Path("E:\\from\\new.txt"),new Path("/chenzy/mapreduce/wordcount/newMkdirs/test.txt")); fileSystem.close(); } //从hdfs拷贝到本地 private static void copyTolocal() throws Exception{ // get filesystem FileSystem fileSystem = getFileSystem(); fileSystem.copyToLocalFile(new Path("/chenzy/mapreduce/wordcount/newMkdirs/test.txt"),new Path("E:\\from\\new2.txt")); fileSystem.close(); }
更多的例子:查看GitHub
相关文章推荐
- Hadoop系列-HDFS文件操作的JAVA API用法(七)
- Hadoop(四)HDFS的高级API操作
- 4000 hadoop入门(三)之 javaAPI操作Hdfs,进行文件操作
- hadoop - hadoop2.6 伪分布式 - Java API 操作 HDFS
- hadoop学习;hdfs操作;运行抛出权限异常: Permission denied;api查看源码方法;源码不停的向里循环;抽象类通过debug查找源码
- Hadoop实战-初级部分 之 HDFS API
- 第二篇:Hadoop HDFS常用JAVA api操作程序
- JAVA操作HDFS API(hadoop) HDFS API详解
- Hadoop实战-初级部分 之 HDFS API
- hadoop hdfs api基本操作
- hadoop hdfs API操作
- 2018-07-10期 HadoopHDFS客户端API操作
- JAVA操作HDFS API(hadoop)
- Hadoop学习二(java api调用操作HDFS)
- hadoop实战之hdfs常用操作工具类
- Hadoop学习记录(3)|HDFS API 操作|RPC调用
- hadoop的hdfs中的javaAPI操作
- Python API 操作Hadoop hdfs
- Hadoop Java API 操作 hdfs--1
- JAVA操作HDFS API(hadoop)