利用hadoop中的hdfs作为海量文件服务器的实践
2015-07-30 14:48
471 查看
当WEB集群的数据达到海量的时候,一台文件服务器的负担会很大,所以可能会换用分布式存储。下面介绍的方法,就是调用hadoop的hdfs进行存和取。
这个是存入hdfs的方法,仅供参考:
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
 * Uploads a file to HDFS through the WebHDFS REST API
 * ({@code PUT /webhdfs/v1/<path>?op=CREATE}).
 */
public class UploadImage {

    // Was incorrectly bound to ShowImage.class, so log lines were
    // attributed to the wrong class.
    private static final Log logger = LogFactory.getLog(UploadImage.class);

    /**
     * Streams {@code stream} to HDFS at {@code path + fileName}.
     *
     * @param stream   source data; always closed before this method returns
     * @param fileName target file name, appended verbatim to {@code path}
     * @param path     target directory path below the WebHDFS root
     * @throws IOException if the upload fails (previously the exception was
     *                     swallowed even though the method declares it)
     */
    public void uploadFile(InputStream stream, String fileName, String path)
            throws IOException {
        HttpURLConnection con = null;
        try {
            con = getConnection("http://192.168.1.96:50070/webhdfs/v1/",
                    path + fileName, "CREATE", null, "PUT");
            // Fetch the output stream once instead of on every iteration.
            OutputStream out = con.getOutputStream();
            byte[] buffer = new byte[1024];
            int rc;
            while ((rc = stream.read(buffer, 0, buffer.length)) > 0) {
                out.write(buffer, 0, rc);
            }
            out.flush();
            // Reading the response forces the request to complete and lets
            // the server report the result of the CREATE operation.
            con.getInputStream().close();
        } catch (IOException e) {
            logger.error("upload of " + path + fileName + " failed", e);
            throw e; // propagate instead of silently swallowing the failure
        } finally {
            if (con != null) {
                con.disconnect();
            }
            // Close the caller's stream even when the upload fails.
            stream.close();
        }
    }

    /**
     * Builds an {@link HttpURLConnection} for a WebHDFS operation.
     *
     * @param strurl     WebHDFS base URL (e.g. {@code http://host:50070/webhdfs/v1/})
     * @param path       HDFS path appended to the base URL
     * @param op         WebHDFS operation name ({@code CREATE}, {@code OPEN}, ...)
     * @param parameters extra query-string parameters, or {@code null}
     * @param request    HTTP method ({@code GET}, {@code PUT}, ...)
     * @return the configured connection, or {@code null} if it could not be
     *         created (the original contract; callers should null-check)
     */
    public HttpURLConnection getConnection(String strurl, String path,
            String op, String parameters, String request) {
        HttpURLConnection con = null;
        StringBuilder sb = new StringBuilder();
        sb.append(strurl).append(path).append("?op=").append(op);
        if (parameters != null) {
            sb.append(parameters);
        }
        try {
            URL url = new URL(sb.toString());
            con = (HttpURLConnection) url.openConnection();
            con.setRequestMethod(request);
            con.setRequestProperty("accept", "*/*");
            con.setRequestProperty("connection", "Keep-Alive");
            con.setRequestProperty("User-Agent",
                    "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0)");
            con.setRequestProperty("Accept-Encoding", "gzip");
            con.setDoInput(true);
            // setDoOutput(true) on a GET makes HttpURLConnection silently
            // switch the method to POST; only enable output for write verbs.
            con.setDoOutput("PUT".equals(request) || "POST".equals(request));
            con.setUseCaches(false);
        } catch (IOException e) {
            logger.error("failed to open connection to " + sb, e);
        }
        return con;
    }
}
下面是读取hdfs的方法:
package image;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import javax.servlet.ServletException;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
 * Servlet that streams a file from HDFS (via the WebHDFS REST API,
 * {@code GET /webhdfs/v1/tmp/<name>?op=OPEN}) to the HTTP response.
 * The file name is taken from the last segment of the request URI.
 */
public class ShowImage extends HttpServlet {
    private static final long serialVersionUID = 1L;
    private static final Log log = LogFactory.getLog(ShowImage.class);

    public ShowImage() {
        super();
    }

    protected void service(HttpServletRequest request,
            HttpServletResponse response) throws ServletException, IOException {
        doPost(request, response);
    }

    protected void doGet(HttpServletRequest request,
            HttpServletResponse response) throws ServletException, IOException {
        doPost(request, response);
    }

    /**
     * Copies the HDFS file {@code /tmp/<last URI segment>} to the response
     * output stream.
     */
    protected void doPost(HttpServletRequest request,
            HttpServletResponse response) throws ServletException, IOException {
        request.setCharacterEncoding("utf-8");
        String uri = request.getRequestURI();
        String[] segments = uri.split("/");
        String name = segments[segments.length - 1];
        HttpURLConnection con = null;
        try {
            ServletOutputStream out = response.getOutputStream();
            con = getConnection("http://192.168.1.96:50070/webhdfs/v1/",
                    "tmp/" + name, "OPEN", null, "GET");
            InputStream is = con.getInputStream();
            try {
                byte[] buffer = new byte[1024];
                int rc;
                // Write only the bytes actually read: the original wrote the
                // whole buffer on every iteration, corrupting the last chunk
                // (and any short read) with stale bytes.
                while ((rc = is.read(buffer)) != -1) {
                    out.write(buffer, 0, rc);
                }
            } finally {
                is.close(); // previously leaked
            }
            out.flush();
            log.info("****Success****");
        } catch (IOException e) {
            // Log with the failing path and cause instead of printStackTrace.
            log.error("failed to stream /tmp/" + name + " from HDFS", e);
        } finally {
            if (con != null) {
                con.disconnect();
            }
        }
    }

    /**
     * Builds an {@link HttpURLConnection} for a WebHDFS operation.
     *
     * @param strurl     WebHDFS base URL (e.g. {@code http://host:50070/webhdfs/v1/})
     * @param path       HDFS path appended to the base URL
     * @param op         WebHDFS operation name ({@code OPEN}, {@code CREATE}, ...)
     * @param parameters extra query-string parameters, or {@code null}
     * @param request    HTTP method ({@code GET}, {@code PUT}, ...)
     * @return the configured connection, or {@code null} if it could not be
     *         created (the original contract; callers should null-check)
     */
    public HttpURLConnection getConnection(String strurl, String path,
            String op, String parameters, String request) {
        HttpURLConnection con = null;
        StringBuilder sb = new StringBuilder();
        sb.append(strurl).append(path).append("?op=").append(op);
        if (parameters != null) {
            sb.append(parameters);
        }
        try {
            URL url = new URL(sb.toString());
            con = (HttpURLConnection) url.openConnection();
            con.setRequestMethod(request);
            con.setRequestProperty("accept", "*/*");
            con.setRequestProperty("connection", "Keep-Alive");
            con.setRequestProperty("User-Agent",
                    "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0)");
            con.setRequestProperty("Accept-Encoding", "gzip");
            con.setDoInput(true);
            // setDoOutput(true) on a GET makes HttpURLConnection silently
            // switch the method to POST; only enable output for write verbs.
            con.setDoOutput("PUT".equals(request) || "POST".equals(request));
            con.setUseCaches(false);
        } catch (IOException e) {
            log.error("failed to open connection to " + sb, e);
        }
        return con;
    }
}
延伸阅读: http://blog.itpub.net/29806344/viewspace-1364778/
关于WEB集群中文件服务器的讨论
原文链接:http://blog.itpub.net/29806344/viewspace-1368266/
这个是存入hdfs的方法,仅供参考:
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
 * Uploads a file to HDFS through the WebHDFS REST API
 * ({@code PUT /webhdfs/v1/<path>?op=CREATE}).
 */
public class UploadImage {

    // Was incorrectly bound to ShowImage.class, so log lines were
    // attributed to the wrong class.
    private static final Log logger = LogFactory.getLog(UploadImage.class);

    /**
     * Streams {@code stream} to HDFS at {@code path + fileName}.
     *
     * @param stream   source data; always closed before this method returns
     * @param fileName target file name, appended verbatim to {@code path}
     * @param path     target directory path below the WebHDFS root
     * @throws IOException if the upload fails (previously the exception was
     *                     swallowed even though the method declares it)
     */
    public void uploadFile(InputStream stream, String fileName, String path)
            throws IOException {
        HttpURLConnection con = null;
        try {
            con = getConnection("http://192.168.1.96:50070/webhdfs/v1/",
                    path + fileName, "CREATE", null, "PUT");
            // Fetch the output stream once instead of on every iteration.
            OutputStream out = con.getOutputStream();
            byte[] buffer = new byte[1024];
            int rc;
            while ((rc = stream.read(buffer, 0, buffer.length)) > 0) {
                out.write(buffer, 0, rc);
            }
            out.flush();
            // Reading the response forces the request to complete and lets
            // the server report the result of the CREATE operation.
            con.getInputStream().close();
        } catch (IOException e) {
            logger.error("upload of " + path + fileName + " failed", e);
            throw e; // propagate instead of silently swallowing the failure
        } finally {
            if (con != null) {
                con.disconnect();
            }
            // Close the caller's stream even when the upload fails.
            stream.close();
        }
    }

    /**
     * Builds an {@link HttpURLConnection} for a WebHDFS operation.
     *
     * @param strurl     WebHDFS base URL (e.g. {@code http://host:50070/webhdfs/v1/})
     * @param path       HDFS path appended to the base URL
     * @param op         WebHDFS operation name ({@code CREATE}, {@code OPEN}, ...)
     * @param parameters extra query-string parameters, or {@code null}
     * @param request    HTTP method ({@code GET}, {@code PUT}, ...)
     * @return the configured connection, or {@code null} if it could not be
     *         created (the original contract; callers should null-check)
     */
    public HttpURLConnection getConnection(String strurl, String path,
            String op, String parameters, String request) {
        HttpURLConnection con = null;
        StringBuilder sb = new StringBuilder();
        sb.append(strurl).append(path).append("?op=").append(op);
        if (parameters != null) {
            sb.append(parameters);
        }
        try {
            URL url = new URL(sb.toString());
            con = (HttpURLConnection) url.openConnection();
            con.setRequestMethod(request);
            con.setRequestProperty("accept", "*/*");
            con.setRequestProperty("connection", "Keep-Alive");
            con.setRequestProperty("User-Agent",
                    "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0)");
            con.setRequestProperty("Accept-Encoding", "gzip");
            con.setDoInput(true);
            // setDoOutput(true) on a GET makes HttpURLConnection silently
            // switch the method to POST; only enable output for write verbs.
            con.setDoOutput("PUT".equals(request) || "POST".equals(request));
            con.setUseCaches(false);
        } catch (IOException e) {
            logger.error("failed to open connection to " + sb, e);
        }
        return con;
    }
}
下面是读取hdfs的方法:
package image;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import javax.servlet.ServletException;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
 * Servlet that streams a file from HDFS (via the WebHDFS REST API,
 * {@code GET /webhdfs/v1/tmp/<name>?op=OPEN}) to the HTTP response.
 * The file name is taken from the last segment of the request URI.
 */
public class ShowImage extends HttpServlet {
    private static final long serialVersionUID = 1L;
    private static final Log log = LogFactory.getLog(ShowImage.class);

    public ShowImage() {
        super();
    }

    protected void service(HttpServletRequest request,
            HttpServletResponse response) throws ServletException, IOException {
        doPost(request, response);
    }

    protected void doGet(HttpServletRequest request,
            HttpServletResponse response) throws ServletException, IOException {
        doPost(request, response);
    }

    /**
     * Copies the HDFS file {@code /tmp/<last URI segment>} to the response
     * output stream.
     */
    protected void doPost(HttpServletRequest request,
            HttpServletResponse response) throws ServletException, IOException {
        request.setCharacterEncoding("utf-8");
        String uri = request.getRequestURI();
        String[] segments = uri.split("/");
        String name = segments[segments.length - 1];
        HttpURLConnection con = null;
        try {
            ServletOutputStream out = response.getOutputStream();
            con = getConnection("http://192.168.1.96:50070/webhdfs/v1/",
                    "tmp/" + name, "OPEN", null, "GET");
            InputStream is = con.getInputStream();
            try {
                byte[] buffer = new byte[1024];
                int rc;
                // Write only the bytes actually read: the original wrote the
                // whole buffer on every iteration, corrupting the last chunk
                // (and any short read) with stale bytes.
                while ((rc = is.read(buffer)) != -1) {
                    out.write(buffer, 0, rc);
                }
            } finally {
                is.close(); // previously leaked
            }
            out.flush();
            log.info("****Success****");
        } catch (IOException e) {
            // Log with the failing path and cause instead of printStackTrace.
            log.error("failed to stream /tmp/" + name + " from HDFS", e);
        } finally {
            if (con != null) {
                con.disconnect();
            }
        }
    }

    /**
     * Builds an {@link HttpURLConnection} for a WebHDFS operation.
     *
     * @param strurl     WebHDFS base URL (e.g. {@code http://host:50070/webhdfs/v1/})
     * @param path       HDFS path appended to the base URL
     * @param op         WebHDFS operation name ({@code OPEN}, {@code CREATE}, ...)
     * @param parameters extra query-string parameters, or {@code null}
     * @param request    HTTP method ({@code GET}, {@code PUT}, ...)
     * @return the configured connection, or {@code null} if it could not be
     *         created (the original contract; callers should null-check)
     */
    public HttpURLConnection getConnection(String strurl, String path,
            String op, String parameters, String request) {
        HttpURLConnection con = null;
        StringBuilder sb = new StringBuilder();
        sb.append(strurl).append(path).append("?op=").append(op);
        if (parameters != null) {
            sb.append(parameters);
        }
        try {
            URL url = new URL(sb.toString());
            con = (HttpURLConnection) url.openConnection();
            con.setRequestMethod(request);
            con.setRequestProperty("accept", "*/*");
            con.setRequestProperty("connection", "Keep-Alive");
            con.setRequestProperty("User-Agent",
                    "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0)");
            con.setRequestProperty("Accept-Encoding", "gzip");
            con.setDoInput(true);
            // setDoOutput(true) on a GET makes HttpURLConnection silently
            // switch the method to POST; only enable output for write verbs.
            con.setDoOutput("PUT".equals(request) || "POST".equals(request));
            con.setUseCaches(false);
        } catch (IOException e) {
            log.error("failed to open connection to " + sb, e);
        }
        return con;
    }
}
延伸阅读: http://blog.itpub.net/29806344/viewspace-1364778/
关于WEB集群中文件服务器的讨论
原文链接:http://blog.itpub.net/29806344/viewspace-1368266/
相关文章推荐
- 详解HDFS Short Circuit Local Reads
- Hadoop_2.1.0 MapReduce序列图
- 使用Hadoop搭建现代电信企业架构
- 单机版搭建Hadoop环境图文教程详解
- hadoop常见错误以及处理方法详解
- hadoop 单机安装配置教程
- hadoop的hdfs文件操作实现上传文件到hdfs
- hadoop实现grep示例分享
- Apache Hadoop版本详解
- linux下搭建hadoop环境步骤分享
- hadoop client与datanode的通信协议分析
- hadoop中一些常用的命令介绍
- Hadoop单机版和全分布式(集群)安装
- 用PHP和Shell写Hadoop的MapReduce程序
- hadoop map-reduce中的文件并发操作
- Hadoop1.2中配置伪分布式的实例
- java结合HADOOP集群文件上传下载
- 用python + hadoop streaming 分布式编程(一) -- 原理介绍,样例程序与本地调试
- Hadoop安装感悟
- hadoop安装lzo