
Hadoop: Writing a SequenceFile (source code)

2013-07-03 20:54
package com.tdxx.hadoop.sequencefile;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.SequenceFile.Writer;

public class SequenceFileStudy {

    /**
     * Write key/value pairs to a sequence file.
     *
     * @param filePath output file path
     * @param conf     Hadoop configuration
     * @param keys     keys to append
     * @param vals     values to append, parallel to keys
     */
    public static void write2SequenceFile(String filePath, Configuration conf,
            LongWritable[] keys, LongWritable[] vals) {
        FileSystem fs = null;
        Writer writer = null;

        try {
            fs = FileSystem.get(conf);
            Path path = new Path(filePath);
            // createWriter replaces any existing file at this path,
            // so all pairs are appended through a single writer
            writer = SequenceFile.createWriter(fs, conf, path,
                    LongWritable.class, LongWritable.class);
            for (int i = 0; i < keys.length; i++) {
                writer.append(keys[i], vals[i]);
            }
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            IOUtils.closeStream(writer);
        }
    }

    private static Configuration getDefaultConf() {
        Configuration conf = new Configuration();
        conf.set("mapred.job.tracker", "local");
        conf.set("fs.default.name", "file:///");
        return conf;
    }

    /**
     * @param args command-line arguments (unused)
     */
    public static void main(String[] args) {
        String filePath = "data/longValue.sequence"; // output file path

        // generate sample data: keys 1..20, each value = key + 1
        LongWritable[] keys = new LongWritable[20];
        LongWritable[] vals = new LongWritable[20];
        for (int i = 1; i <= 20; i++) {
            keys[i - 1] = new LongWritable(i);
            vals[i - 1] = new LongWritable(i + 1);
        }

        // write all pairs to the sequence file in one pass
        write2SequenceFile(filePath, getDefaultConf(), keys, vals);
    }

}
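
To check that the records were actually written, the file can be read back with SequenceFile.Reader. The class below is a minimal sketch and not part of the original post: it assumes the same Hadoop 1.x API and the same local-filesystem configuration as above, and simply prints every key/value pair found in data/longValue.sequence.

package com.tdxx.hadoop.sequencefile;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.SequenceFile;

public class SequenceFileReadStudy {

    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        conf.set("fs.default.name", "file:///"); // read from the local filesystem

        FileSystem fs = FileSystem.get(conf);
        Path path = new Path("data/longValue.sequence");

        SequenceFile.Reader reader = null;
        try {
            reader = new SequenceFile.Reader(fs, path, conf);
            LongWritable key = new LongWritable();
            LongWritable val = new LongWritable();
            // next() fills key and value and returns false at end of file
            while (reader.next(key, val)) {
                System.out.println(key.get() + "\t" + val.get());
            }
        } finally {
            IOUtils.closeStream(reader);
        }
    }
}

The same file can also be inspected from the command line with "hadoop fs -text data/longValue.sequence", which understands the SequenceFile format and prints the pairs as text.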