
Managing the Hadoop file system ----> you can even build a network disk with it

2014-05-30 16:09
Today I'd like to walk you through the basic skeleton of a network disk (cloud drive) built with Hadoop HDFS and Struts.

*************************************************************************************************************************

Basic HDFS operations

(1) List the files under a given path (throws FileNotFoundException, IOException)

String dst = "hdfs://localhost:9000/" + filename;
Configuration conf = new Configuration();
FileSystem fs = FileSystem.get(URI.create(dst), conf);
FileStatus fileList[] = fs.listStatus(new Path(dst));

for (int i = 0; i < fileList.length; i++) {
    if (fileList[i].isDir()) {
        // a directory; FileStatus.isDir() is more reliable than checking whether getLen() == 0
    } else {
        // a regular file; fileList[i].getLen() gives its size in bytes
    }
}
fs.close();
return fileList;   // hand the listing back to the caller, e.g. the page that renders the directory
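For a network-disk page, the raw FileStatus array usually gets turned into something simpler to render. The helper below is a self-contained sketch of one way to do that; the class and method names (HdfsList, listNames) are illustrative and not from the original post:

import java.net.URI;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HdfsList {
    // Illustrative helper: returns the entry names under dir, appending "/" to
    // directories so the page can tell them apart from plain files.
    public static List<String> listNames(String dir) throws Exception {
        String dst = "hdfs://localhost:9000/" + dir;
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(dst), conf);
        List<String> names = new ArrayList<String>();
        for (FileStatus status : fs.listStatus(new Path(dst))) {
            String name = status.getPath().getName();
            names.add(status.isDir() ? name + "/" : name);
        }
        fs.close();
        return names;
    }

    public static void main(String[] args) throws Exception {
        for (String name : listNames("user")) {
            System.out.println(name);
        }
    }
}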
(2) Upload a file to HDFS
String localSrc = "/home/red-zhu/chen.txt";
String dst = "hdfs://localhost:9000/user/" + name;   // "name" is the target file name on HDFS
System.out.println(dst);
InputStream in = new BufferedInputStream(new FileInputStream(localSrc));
Configuration conf = new Configuration();
FileSystem fs = FileSystem.get(URI.create(dst), conf);
OutputStream out = fs.create(new Path(dst), new Progressable() {
    public void progress() {
        System.out.print(".");   // one dot per packet written, as simple progress feedback
    }
});
// copy the input stream into HDFS; the final "true" closes both streams when the copy is done
IOUtils.copyBytes(in, out, 4096, true);
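When the source file already sits on the local disk (rather than arriving as an upload stream from Struts), FileSystem.copyFromLocalFile does the same job in a single call. A minimal, self-contained sketch with illustrative paths:

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class CopyLocalToHdfs {
    public static void main(String[] args) throws Exception {
        // Paths are illustrative; point them at your own file and cluster.
        String dst = "hdfs://localhost:9000/user/chen.txt";
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(dst), conf);
        // One call copies the local file into HDFS, no manual stream handling needed.
        fs.copyFromLocalFile(new Path("/home/red-zhu/chen.txt"), new Path(dst));
        fs.close();
    }
}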
(3) Download a file to the local machine
String dst = "hdfs://localhost:9000/" + path;
Configuration conf = new Configuration();
FileSystem fs = FileSystem.get(URI.create(dst), conf);
FSDataInputStream hdfsInStream = fs.open(new Path(dst));
return hdfsInStream;(4)删除给定路径的文件
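The mirror operation also exists: if the goal is simply to drop an HDFS file onto the local disk instead of streaming it to a browser, FileSystem.copyToLocalFile handles it in one call. A minimal sketch with illustrative paths:

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class CopyHdfsToLocal {
    public static void main(String[] args) throws Exception {
        // Paths are illustrative.
        String src = "hdfs://localhost:9000/user/qq1.txt";
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(src), conf);
        // One call copies the HDFS file onto the local file system.
        fs.copyToLocalFile(new Path(src), new Path("/tmp/qq1.txt"));
        fs.close();
    }
}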
(4) Delete the file at a given path

String dst = "hdfs://localhost:9000" + path;
Configuration conf = new Configuration();
FileSystem fs = FileSystem.get(URI.create(dst), conf);
// deleteOnExit marks the path for deletion; it is actually removed when fs.close() runs
fs.deleteOnExit(new Path(dst));
fs.close();
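The Struts actions in the next part call these operations through an object named hdfsUtil, whose class the post never shows. Below is a minimal sketch of what such a utility class might look like; only the two method names (uploadToHdfs, readFromHdfs) come from the action code, while everything else (class name, base URI, exact signatures) is an assumption:

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.Progressable;

// Sketch of the utility the Struts actions call as "hdfsUtil"; class name and base URI assumed.
public class HdfsUtil {
    private static final String BASE = "hdfs://localhost:9000";

    // Write the given stream to /user/<name> on HDFS (see section (2) above).
    public void uploadToHdfs(InputStream in, String name) throws IOException {
        String dst = BASE + "/user/" + name;
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(dst), conf);
        OutputStream out = fs.create(new Path(dst), new Progressable() {
            public void progress() {
                System.out.print(".");
            }
        });
        IOUtils.copyBytes(in, out, 4096, true);   // closes both streams
        fs.close();
    }

    // Open an HDFS file and return the stream (see section (3) above); the FileSystem
    // is intentionally left open because the returned stream still needs it.
    public FSDataInputStream readFromHdfs(String path) throws IOException {
        String dst = BASE + "/" + path;
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(dst), conf);
        return fs.open(new Path(dst));
    }
}

Each action could then simply hold a field such as private HdfsUtil hdfsUtil = new HdfsUtil(); and delegate to it.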

*******************************************************************************************************************************************
The Struts part

(2-1) Code for the upload action

private List<File> image;
private List<String> imageContentType;
private List<String> imageFileName;

@Override
public String execute() throws Exception {
    // Directory under the web app where files would land if they were saved locally.
    String path = ServletActionContext.getServletContext().getRealPath("/images");
    System.out.println("local save path: " + path);

    if (image.size() > 0) {
        BufferedInputStream bis = null;
        for (int i = 0; i < image.size(); i++) {
            System.out.println("number of uploaded files: " + image.size());
            String filename = imageFileName.get(i);
            FileInputStream fis = new FileInputStream(image.get(i));
            bis = new BufferedInputStream(fis);
            // FileUtils.copyFile(image.get(i), saveFile);
            // Stream the temporary upload file into HDFS under its original name.
            hdfsUtil.uploadToHdfs(bis, filename);
        }
    }

    // Older variant that kept the uploads on the local disk instead of HDFS:
    // for (int i = 0; i < image.size(); i++) {
    //     File data = image.get(i);
    //     String fileName = imageFileName.get(i);
    //     fileName = path + "\\" + fileName;
    //     data.renameTo(new File(fileName));
    // }
    return SUCCESS;
}

public List<File> getImage() {
    return image;
}

public void setImage(List<File> image) {
    this.image = image;
}

public List<String> getImageContentType() {
    return imageContentType;
}

public void setImageContentType(List<String> imageContentType) {
    this.imageContentType = imageContentType;
}

public List<String> getImageFileName() {
    return imageFileName;
}

public void setImageFileName(List<String> imageFileName) {
    this.imageFileName = imageFileName;
}

(2-2) Code for the download module

private String fileName;
private String flag;

public String getFlag() {
    return flag;
}

public void setFlag(String flag) {
    this.flag = flag;
}

public String getFileName() {
    return fileName;
}

public void setFileName(String fileName) {
    this.fileName = fileName;
}

// Called by the Struts2 "stream" result: concatenates the "path" attribute stored in the
// session with the flag request parameter, opens that HDFS file, and returns the stream.
public InputStream getInputStream() throws FileNotFoundException, IOException {
    return hdfsUtil.readFromHdfs((String) (ActionContext.getContext()
            .getSession().get("path")) + flag);
}

public String execute() {
    return "success";
}
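One common companion to getInputStream, not shown in the original post, is a getter that URL-encodes the file name so non-ASCII names survive the download header; a Struts2 stream result can then reference it from struts.xml, for example contentDisposition="attachment;filename=${downloadFileName}". A minimal sketch (it needs java.net.URLEncoder and java.io.UnsupportedEncodingException):

// Hypothetical helper, not part of the original action.
public String getDownloadFileName() throws UnsupportedEncodingException {
    // URL-encode so non-ASCII names survive the HTTP response header.
    return URLEncoder.encode(fileName, "UTF-8");
}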